[ 472.792986] env[62923]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' {{(pid=62923) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 472.793438] env[62923]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' {{(pid=62923) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 472.793438] env[62923]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=62923) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 472.793773] env[62923]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 472.887448] env[62923]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62923) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 472.897431] env[62923]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62923) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 473.504018] env[62923]: INFO nova.virt.driver [None req-e187ba6a-c3f1-4c69-970e-244bd1c86e01 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 473.575493] env[62923]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 473.575844] env[62923]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 473.576113] env[62923]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62923) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 476.705973] env[62923]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-185d5e93-ee45-492a-81c7-543b43202fbe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 476.721691] env[62923]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62923) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 476.721861] env[62923]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-2c153fd3-5176-495d-b89d-a391253b4e70 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 476.746408] env[62923]: INFO oslo_vmware.api [-] Successfully established new session; session ID is cc3fe.
[ 476.746548] env[62923]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.171s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 476.747105] env[62923]: INFO nova.virt.vmwareapi.driver [None req-e187ba6a-c3f1-4c69-970e-244bd1c86e01 None None] VMware vCenter version: 7.0.3
[ 476.750471] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df190669-632d-4e22-8199-b3449c7681f9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 476.767925] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49cc7ce4-85ca-465f-ac08-49dccdba7c38 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 476.773590] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d90e89-bb65-47bd-a69c-368d0b36ef80 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 476.779882] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35744ae8-eb7c-46cc-b34b-28d05d15ddef {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 476.792762] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68bcac38-797c-416b-8887-9420d454dbc4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 476.798484] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ef5f1f-90a3-489d-a4d5-fadf4f93b035 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 476.828515] env[62923]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-0c5459e0-0108-43c2-b77d-1397f431da13 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 476.833384] env[62923]: DEBUG nova.virt.vmwareapi.driver [None req-e187ba6a-c3f1-4c69-970e-244bd1c86e01 None None] Extension org.openstack.compute already exists. {{(pid=62923) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 476.836409] env[62923]: INFO nova.compute.provider_config [None req-e187ba6a-c3f1-4c69-970e-244bd1c86e01 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 477.341069] env[62923]: DEBUG nova.context [None req-e187ba6a-c3f1-4c69-970e-244bd1c86e01 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),7c385852-dcd3-47ff-aed8-fb74e138993a(cell1) {{(pid=62923) load_cells /opt/stack/nova/nova/context.py:464}}
[ 477.342564] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 477.342817] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 477.343505] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 477.343933] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Acquiring lock "7c385852-dcd3-47ff-aed8-fb74e138993a" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 477.344175] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Lock "7c385852-dcd3-47ff-aed8-fb74e138993a" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 477.345197] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Lock "7c385852-dcd3-47ff-aed8-fb74e138993a" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 477.365517] env[62923]: INFO dbcounter [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Registered counter for database nova_cell0
[ 477.373671] env[62923]: INFO dbcounter [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Registered counter for database nova_cell1
[ 477.377155] env[62923]: DEBUG oslo_db.sqlalchemy.engines [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62923) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 477.377510] env[62923]: DEBUG oslo_db.sqlalchemy.engines [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62923) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 477.382290] env[62923]: ERROR nova.db.main.api [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 477.382290] env[62923]: result = function(*args, **kwargs)
[ 477.382290] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 477.382290] env[62923]: return func(*args, **kwargs)
[ 477.382290] env[62923]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 477.382290] env[62923]: result = fn(*args, **kwargs)
[ 477.382290] env[62923]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 477.382290] env[62923]: return f(*args, **kwargs)
[ 477.382290] env[62923]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 477.382290] env[62923]: return db.service_get_minimum_version(context, binaries)
[ 477.382290] env[62923]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 477.382290] env[62923]: _check_db_access()
[ 477.382290] env[62923]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 477.382290] env[62923]: stacktrace = ''.join(traceback.format_stack())
[ 477.382290] env[62923]:
[ 477.383095] env[62923]: ERROR nova.db.main.api [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 477.383095] env[62923]: result = function(*args, **kwargs)
[ 477.383095] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 477.383095] env[62923]: return func(*args, **kwargs)
[ 477.383095] env[62923]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 477.383095] env[62923]: result = fn(*args, **kwargs)
[ 477.383095] env[62923]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 477.383095] env[62923]: return f(*args, **kwargs)
[ 477.383095] env[62923]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 477.383095] env[62923]: return db.service_get_minimum_version(context, binaries)
[ 477.383095] env[62923]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 477.383095] env[62923]: _check_db_access()
[ 477.383095] env[62923]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 477.383095] env[62923]: stacktrace = ''.join(traceback.format_stack())
[ 477.383095] env[62923]:
[ 477.383508] env[62923]: WARNING nova.objects.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 477.383686] env[62923]: WARNING nova.objects.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Failed to get minimum service version for cell 7c385852-dcd3-47ff-aed8-fb74e138993a
[ 477.384110] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Acquiring lock "singleton_lock" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 477.384279] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Acquired lock "singleton_lock" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 477.384518] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Releasing lock "singleton_lock" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 477.384837] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Full set of CONF: {{(pid=62923) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 477.384979] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ******************************************************************************** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 477.385125] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Configuration options gathered from: {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 477.385264] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}}
[ 477.385453] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 477.385582] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ================================================================================ {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}}
[ 477.385787] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] allow_resize_to_same_host = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.385956] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] arq_binding_timeout = 300 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.386106] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] backdoor_port = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.386236] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] backdoor_socket = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.386399] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] block_device_allocate_retries = 60 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.386559] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] block_device_allocate_retries_interval = 3 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.386728] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cert = self.pem {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.386891] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.387240] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute_monitors = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.387443] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] config_dir = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.387622] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] config_drive_format = iso9660 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.387760] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.387927] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] config_source = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.388117] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] console_host = devstack {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.388288] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] control_exchange = nova {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.388448] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cpu_allocation_ratio = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.388609] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] daemon = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.388781] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] debug = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.388941] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] default_access_ip_network_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.389123] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] default_availability_zone = nova {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.389284] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] default_ephemeral_format = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.389446] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] default_green_pool_size = 1000 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.389686] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.389848] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] default_schedule_zone = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.390024] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] disk_allocation_ratio = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.390186] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] enable_new_services = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.390368] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] enabled_apis = ['osapi_compute'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.390533] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] enabled_ssl_apis = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.390696] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] flat_injected = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.390853] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] force_config_drive = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.391016] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] force_raw_images = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.391226] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] graceful_shutdown_timeout = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.391396] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] heal_instance_info_cache_interval = 60 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.391608] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] host = cpu-1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.391783] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.391946] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] initial_disk_allocation_ratio = 1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.392122] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] initial_ram_allocation_ratio = 1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.392361] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.392531] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] instance_build_timeout = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.392694] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] instance_delete_interval = 300 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.392862] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] instance_format = [instance: %(uuid)s] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.393040] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] instance_name_template = instance-%08x {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.393208] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] instance_usage_audit = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.393383] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] instance_usage_audit_period = month {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.393580] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.393755] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] instances_path = /opt/stack/data/nova/instances {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.393923] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] internal_service_availability_zone = internal {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.394096] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] key = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.394261] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] live_migration_retry_count = 30 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.394430] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] log_color = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.394594] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] log_config_append = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.394760] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.394919] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] log_dir = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.395091] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] log_file = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.395225] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] log_options = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.395389] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] log_rotate_interval = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.395557] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] log_rotate_interval_type = days {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.395758] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] log_rotation_type = none {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.395923] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.396087] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.396297] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.396490] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.396644] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.396846] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] long_rpc_timeout = 1800 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.397031] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] max_concurrent_builds = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.397197] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] max_concurrent_live_migrations = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.397357] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] max_concurrent_snapshots = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.397517] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] max_local_block_devices = 3 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.397674] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] max_logfile_count = 30 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.397830] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] max_logfile_size_mb = 200 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.397986] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] maximum_instance_delete_attempts = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.398167] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] metadata_listen = 0.0.0.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.398338] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] metadata_listen_port = 8775 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.398506] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] metadata_workers = 2 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.398668] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] migrate_max_retries = -1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.398834] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] mkisofs_cmd = genisoimage {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.399051] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] my_block_storage_ip = 10.180.1.21 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.399188] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] my_ip = 10.180.1.21 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.399355] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] network_allocate_retries = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.399533] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.399699] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] osapi_compute_listen = 0.0.0.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.399861] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] osapi_compute_listen_port = 8774 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.400037] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] osapi_compute_unique_server_name_scope = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.400211] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] osapi_compute_workers = 2 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.400378] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] password_length = 12 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.400537] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] periodic_enable = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.400697] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] periodic_fuzzy_delay = 60 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.400864] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] pointer_model = usbtablet {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.401040] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] preallocate_images = none {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.401232] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] publish_errors = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.401371] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] pybasedir = /opt/stack/nova {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.401531] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ram_allocation_ratio = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.401694] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] rate_limit_burst = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.401860] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] rate_limit_except_level = CRITICAL {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.402035] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] rate_limit_interval = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.402215] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] reboot_timeout = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.402395] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] reclaim_instance_interval = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.402558] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] record = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.402728] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] reimage_timeout_per_gb = 60 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.402895] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] report_interval = 120 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.403070] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] rescue_timeout = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.403235] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] reserved_host_cpus = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.403399] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] reserved_host_disk_mb = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.403588] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] reserved_host_memory_mb = 512 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.403755] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] reserved_huge_pages = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.403915] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] resize_confirm_window = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.404087] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] resize_fs_using_block_device = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.404250] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] resume_guests_state_on_host_boot = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.404417] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.404579] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] rpc_response_timeout = 60 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.404737] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] run_external_periodic_tasks = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.404904] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] running_deleted_instance_action = reap {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.405072] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] running_deleted_instance_poll_interval = 1800 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.405234] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] running_deleted_instance_timeout = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.405392] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] scheduler_instance_sync_interval = 120 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.405556] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] service_down_time = 720 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.405722] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] servicegroup_driver = db {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.405878] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] shell_completion = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.406046] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] shelved_offload_time = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.406207] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] shelved_poll_interval = 3600 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.406374] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] shutdown_timeout = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.406532] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] source_is_ipv6 = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.406688] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ssl_only = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.406931] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.407110] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] sync_power_state_interval = 600 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.407272] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] sync_power_state_pool_size = 1000 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.407437] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] syslog_log_facility = LOG_USER {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.407593] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] tempdir = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.407750] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] timeout_nbd = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.407915] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] transport_url = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.408086] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] update_resources_interval = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.408248] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] use_cow_images = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.408406] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] use_eventlog = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.408561] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] use_journal = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.408718] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] use_json = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.408876] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] use_rootwrap_daemon = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.409040] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] use_stderr = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.409203] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] use_syslog = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.409361] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vcpu_pin_set = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.409526] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plugging_is_fatal = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.409694] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plugging_timeout = 300 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.409859] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] virt_mkfs = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.410028] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] volume_usage_poll_interval = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.410194] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] watch_log_file = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.410359] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] web = /usr/share/spice-html5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 477.410539] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.410704] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.410865] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.411043] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_concurrency.disable_process_locking = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.411619] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.411814] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.411986] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.412177] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.412382] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.412555] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.412738] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.auth_strategy = keystone {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.412907] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.compute_link_prefix = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.413098] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.413282] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.dhcp_domain = novalocal {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.413474] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.enable_instance_password = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.413658] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.glance_link_prefix = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.413830] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.414018] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.414187] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.instance_list_per_project_cells = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.414349] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.list_records_by_skipping_down_cells = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.414513] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.local_metadata_per_cell = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.414683] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.max_limit = 1000 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.414852] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.metadata_cache_expiration = 15 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.415034] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.neutron_default_tenant_id = default {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.415211] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.response_validation = warn {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.415383] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.use_neutron_default_nets = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.415550] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.415709] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.415874] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.416053] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.416228] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.vendordata_dynamic_targets = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.416392] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.vendordata_jsonfile_path = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.416568] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.416761] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.backend = dogpile.cache.memcached {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.416931] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.backend_argument = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.417115] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.config_prefix = cache.oslo {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.417290] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.dead_timeout = 60.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.417456] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.debug_cache_backend = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.417619] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.enable_retry_client = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.417781] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.enable_socket_keepalive = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.417951] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.enabled = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.418132] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.enforce_fips_mode = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.418299] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.expiration_time = 600 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.418461] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.hashclient_retry_attempts = 2 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.418633] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.hashclient_retry_delay = 1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.418790] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.memcache_dead_retry = 300 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.418948] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.memcache_password = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.419123] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.419291] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.419452] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.memcache_pool_maxsize = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.419612] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.419772] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.memcache_sasl_enabled = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.419949] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.420128] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.memcache_socket_timeout = 1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.420294] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.memcache_username = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.420459] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.proxies = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 477.420624] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.redis_db = 0 {{(pid=62923) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.420783] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.redis_password = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.420953] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.redis_sentinel_service_name = mymaster {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.421160] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.421347] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.redis_server = localhost:6379 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.421517] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.redis_socket_timeout = 1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.421705] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.redis_username = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.421956] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.retry_attempts = 2 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.422182] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.retry_delay = 0.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.422369] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.socket_keepalive_count = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.422535] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.socket_keepalive_idle = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.422697] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.socket_keepalive_interval = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.422854] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.tls_allowed_ciphers = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.423016] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.tls_cafile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.423181] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.tls_certfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
477.423341] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.tls_enabled = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.423499] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cache.tls_keyfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.423669] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.auth_section = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.423842] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.auth_type = password {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.424009] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.cafile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.424194] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.catalog_info = volumev3::publicURL {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.424358] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.certfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.424521] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.collect_timing = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.424681] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.cross_az_attach = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.424842] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.debug = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.425007] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.endpoint_template = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.425177] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.http_retries = 3 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.425339] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.insecure = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.425497] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.keyfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.425667] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.os_region_name = RegionOne 
{{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.425833] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.split_loggers = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.425990] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cinder.timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.426176] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.426340] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute.cpu_dedicated_set = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.426498] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute.cpu_shared_set = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.426662] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute.image_type_exclude_list = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.426823] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.426984] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute.max_concurrent_disk_ops = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.427163] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute.max_disk_devices_to_attach = -1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.427328] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.427497] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.427658] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute.resource_provider_association_refresh = 300 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.427818] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.427978] env[62923]: DEBUG oslo_service.service [None 
req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute.shutdown_retry_interval = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.428169] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.428350] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] conductor.workers = 2 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.428527] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] console.allowed_origins = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.428689] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] console.ssl_ciphers = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.428858] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] console.ssl_minimum_version = default {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.429037] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] consoleauth.enforce_session_timeout = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.429208] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] consoleauth.token_ttl = 600 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.429380] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.cafile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.429538] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.certfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.429696] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.collect_timing = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.429854] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.connect_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.430024] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.connect_retry_delay = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.430180] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.endpoint_override = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.430345] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] 
cyborg.insecure = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.430503] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.keyfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.430660] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.max_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.430817] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.min_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.430974] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.region_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.431173] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.retriable_status_codes = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.431345] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.service_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.431517] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.service_type = accelerator {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.431677] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.split_loggers = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.431834] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.status_code_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.431990] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.status_code_retry_delay = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.432162] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.432366] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.432532] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] cyborg.version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.432713] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.backend = sqlalchemy {{(pid=62923) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.432884] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.connection = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.433060] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.connection_debug = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.433541] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.connection_parameters = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.433541] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.connection_recycle_time = 3600 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.433541] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.connection_trace = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.433696] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.db_inc_retry_interval = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.433860] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.db_max_retries = 20 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.434031] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.db_max_retry_interval = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.434198] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.db_retry_interval = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.434361] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.max_overflow = 50 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.434521] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.max_pool_size = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.434682] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.max_retries = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.434849] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.435013] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.mysql_wsrep_sync_wait = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
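
Every entry in this dump is produced by oslo.config: at service startup Nova calls ConfigOpts.log_opt_values(), which walks each registered option group and emits one DEBUG record per option — the {{(pid=62923) log_opt_values .../oslo_config/cfg.py:2826}} suffix on every line is the devstack debug-format pointer at that method — and masks any option registered with secret=True as ****, which is why database.connection and the various passwords above print as ****. Below is a minimal, self-contained sketch of that mechanism; the logger setup is illustrative, and only two of the [database] options shown above are registered (the real service registers hundreds across all the groups in this dump):

import logging
import sys

from oslo_config import cfg

LOG = logging.getLogger(__name__)
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)

# A private ConfigOpts instance; Nova uses the shared cfg.CONF singleton.
CONF = cfg.ConfigOpts()
CONF.register_opts(
    [
        # secret=True is why the dump shows 'database.connection = ****'
        cfg.StrOpt('connection', secret=True),
        cfg.IntOpt('connection_recycle_time', default=3600),
    ],
    group='database',
)

# Parse an empty argument list so no config files or CLI args are required.
CONF(args=[], project='nova')

# Emits one DEBUG record per registered option, e.g.
#   database.connection = ****
#   database.connection_recycle_time = 3600
CONF.log_opt_values(LOG, logging.DEBUG)

Running this prints the same "group.option = value" records seen throughout this section; in the service log they additionally carry the request-context prefix ([None req-... None None]) and the pid/function/location suffix added by oslo.log's debug formatting.
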
[ 477.435176] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.pool_timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.435339] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.retry_interval = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.435496] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.slave_connection = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.435656] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.sqlite_synchronous = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.435815] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] database.use_db_reconnect = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.435994] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.backend = sqlalchemy {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.436176] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.connection = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.436344] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.connection_debug = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.436512] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.connection_parameters = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.436676] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.connection_recycle_time = 3600 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.436837] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.connection_trace = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.436996] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.db_inc_retry_interval = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.437172] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.db_max_retries = 20 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.437335] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.db_max_retry_interval = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.437493] env[62923]: DEBUG 
oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.db_retry_interval = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.437651] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.max_overflow = 50 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.437811] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.max_pool_size = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.437972] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.max_retries = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.438156] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.438320] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.438479] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.pool_timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.438641] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.retry_interval = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.438799] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.slave_connection = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.438958] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] api_database.sqlite_synchronous = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.439147] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] devices.enabled_mdev_types = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.439328] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.439498] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ephemeral_storage_encryption.default_format = luks {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.439658] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ephemeral_storage_encryption.enabled = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.439823] env[62923]: 
DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.439993] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.api_servers = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.440175] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.cafile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.440341] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.certfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.440505] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.collect_timing = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.440664] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.connect_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.440821] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.connect_retry_delay = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.440983] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.debug = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.441184] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.default_trusted_certificate_ids = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.441358] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.enable_certificate_validation = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.441520] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.enable_rbd_download = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.441680] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.endpoint_override = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.441842] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.insecure = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.442018] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.keyfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.442183] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] 
glance.max_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.442373] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.min_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.442548] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.num_retries = 3 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.442718] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.rbd_ceph_conf = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.442881] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.rbd_connect_timeout = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.443063] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.rbd_pool = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.443234] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.rbd_user = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.443398] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.region_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.443557] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.retriable_status_codes = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.443714] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.service_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.443885] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.service_type = image {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.444058] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.split_loggers = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.444232] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.status_code_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.444395] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.status_code_retry_delay = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.444554] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.444734] 
env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.444899] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.verify_glance_signatures = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.445070] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] glance.version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.445240] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] guestfs.debug = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.445414] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] mks.enabled = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.445764] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.445953] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] image_cache.manager_interval = 2400 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.446138] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] image_cache.precache_concurrency = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.446313] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] image_cache.remove_unused_base_images = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.446481] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.446650] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.446825] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] image_cache.subdirectory_name = _base {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.447019] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.api_max_retries = 60 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.447182] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.api_retry_interval = 2 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
477.447345] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.auth_section = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.447505] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.auth_type = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.447667] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.cafile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.447826] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.certfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.447989] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.collect_timing = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.448165] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.conductor_group = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.448326] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.connect_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.448486] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.connect_retry_delay = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.448645] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.endpoint_override = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.448809] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.insecure = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.448968] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.keyfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.449138] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.max_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.449299] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.min_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.449464] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.peer_list = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.449621] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.region_name = None {{(pid=62923) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.449778] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.retriable_status_codes = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.449940] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.serial_console_state_timeout = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.450113] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.service_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.450286] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.service_type = baremetal {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.450445] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.shard = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.450609] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.split_loggers = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.450769] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.status_code_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.450927] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.status_code_retry_delay = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.451110] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.451308] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.451476] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ironic.version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.451659] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.451832] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] key_manager.fixed_key = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.452023] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62923) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.452225] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.barbican_api_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.452408] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.barbican_endpoint = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.452584] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.barbican_endpoint_type = public {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.452744] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.barbican_region_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.452904] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.cafile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.453075] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.certfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.453241] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.collect_timing = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.453405] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.insecure = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.453562] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.keyfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.453726] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.number_of_retries = 60 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.453889] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.retry_delay = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.454061] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.send_service_user_token = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.454228] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.split_loggers = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.454386] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.454547] 
env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.verify_ssl = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.454705] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican.verify_ssl_path = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.454869] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican_service_user.auth_section = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.455041] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican_service_user.auth_type = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.455206] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican_service_user.cafile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.455367] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican_service_user.certfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.455528] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican_service_user.collect_timing = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.455685] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican_service_user.insecure = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.455839] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican_service_user.keyfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.455999] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican_service_user.split_loggers = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.456167] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] barbican_service_user.timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.456334] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vault.approle_role_id = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.456492] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vault.approle_secret_id = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.456658] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vault.kv_mountpoint = secret {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.456816] env[62923]: DEBUG 
oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vault.kv_path = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.456979] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vault.kv_version = 2 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.457151] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vault.namespace = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.457312] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vault.root_token_id = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.457468] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vault.ssl_ca_crt_file = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.457633] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vault.timeout = 60.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.457791] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vault.use_ssl = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.457960] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.458144] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.auth_section = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.458312] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.auth_type = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.458471] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.cafile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.458631] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.certfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.458792] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.collect_timing = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.458950] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.connect_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.459123] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.connect_retry_delay = None {{(pid=62923) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.459285] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.endpoint_override = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.459446] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.insecure = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.459603] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.keyfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.459758] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.max_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.459914] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.min_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.460086] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.region_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.460249] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.retriable_status_codes = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.460410] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.service_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.460587] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.service_type = identity {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.460813] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.split_loggers = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.460983] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.status_code_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.461186] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.status_code_retry_delay = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.461353] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.461534] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
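The [keystone] block above is not registered option by option: cafile, certfile, keyfile, insecure, timeout, collect_timing and split_loggers are keystoneauth1's session options, while connect_retries, endpoint_override, service_type, valid_interfaces, min/max_version, region_name, retriable_status_codes and the status_code/connect retry knobs are its adapter options, pulled in wholesale under one group. A sketch of that registration (group name taken from the log; the values printed here are keystoneauth's own defaults, i.e. None, whereas nova overrides some of them, which is why the dump shows service_type = identity and valid_interfaces = ['internal', 'public']):

    from keystoneauth1 import loading
    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    # One call each registers the whole session / adapter option set that
    # the [keystone] records above enumerate.
    loading.register_session_conf_options(conf, 'keystone')
    loading.register_adapter_conf_options(conf, 'keystone')
    conf(args=[])

    print(conf.keystone.service_type)      # None here; nova defaults it to 'identity'
    print(conf.keystone.valid_interfaces)  # None here; nova defaults it as logged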
477.461694] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] keystone.version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.461894] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.connection_uri = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.462065] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.cpu_mode = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.462262] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.cpu_model_extra_flags = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.462435] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.cpu_models = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.462607] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.cpu_power_governor_high = performance {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.462774] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.cpu_power_governor_low = powersave {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.462937] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.cpu_power_management = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.463121] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.463289] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.device_detach_attempts = 8 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.463452] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.device_detach_timeout = 20 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.463617] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.disk_cachemodes = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.463774] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.disk_prefix = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.463935] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.enabled_perf_events = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.464107] env[62923]: DEBUG oslo_service.service [None 
req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.file_backed_memory = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.464321] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.gid_maps = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.464541] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.hw_disk_discard = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.465565] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.hw_machine_type = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.465565] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.images_rbd_ceph_conf = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.465565] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.465565] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.465565] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.images_rbd_glance_store_name = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.465792] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.images_rbd_pool = rbd {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.466021] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.images_type = default {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.466196] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.images_volume_group = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.466360] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.inject_key = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.466528] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.inject_partition = -2 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.466693] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.inject_password = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.466856] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] 
libvirt.iscsi_iface = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.467027] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.iser_use_multipath = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.467192] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.live_migration_bandwidth = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.467359] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.467521] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.live_migration_downtime = 500 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.467683] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.467914] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.468859] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.live_migration_inbound_addr = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.468859] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.468859] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.live_migration_permit_post_copy = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.468859] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.live_migration_scheme = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.469090] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.live_migration_timeout_action = abort {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.469226] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.live_migration_tunnelled = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.469413] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.live_migration_uri = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.469586] env[62923]: DEBUG oslo_service.service [None 
req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.live_migration_with_native_tls = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.469745] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.max_queues = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473016] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473016] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473016] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.nfs_mount_options = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473016] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473016] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473016] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.num_iser_scan_tries = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473016] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.num_memory_encrypted_guests = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473254] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473254] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.num_pcie_ports = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473254] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.num_volume_scan_tries = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473254] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.pmem_namespaces = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473254] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.quobyte_client_cfg = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473254] env[62923]: DEBUG oslo_service.service [None 
req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473254] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.rbd_connect_timeout = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473427] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473427] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473427] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.rbd_secret_uuid = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473427] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.rbd_user = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473427] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473427] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.remote_filesystem_transport = ssh {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473577] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.rescue_image_id = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473716] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.rescue_kernel_id = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.473839] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.rescue_ramdisk_id = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.474013] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.474198] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.rx_queue_size = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.474387] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.smbfs_mount_options = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.474662] env[62923]: DEBUG oslo_service.service [None 
req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.474828] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.snapshot_compression = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.474988] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.snapshot_image_format = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.475239] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.475418] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.sparse_logical_volumes = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.475587] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.swtpm_enabled = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.475757] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.swtpm_group = tss {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.475923] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.swtpm_user = tss {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.476113] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.sysinfo_serial = unique {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.476276] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.tb_cache_size = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.476432] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.tx_queue_size = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.476597] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.uid_maps = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.476757] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.use_virtio_for_bridges = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.476925] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.virt_type = kvm {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.477110] env[62923]: DEBUG oslo_service.service [None 
req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.volume_clear = zero {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.477304] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.volume_clear_size = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.477478] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.volume_use_multipath = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.477640] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.vzstorage_cache_path = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.477808] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.477975] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.vzstorage_mount_group = qemu {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.478156] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.vzstorage_mount_opts = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.478358] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.478637] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.478816] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.vzstorage_mount_user = stack {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.478985] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.479178] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.auth_section = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.479356] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.auth_type = password {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.479519] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.cafile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.479682] env[62923]: DEBUG oslo_service.service 
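The long [libvirt] run that just ended is consumed at runtime as ordinary typed options. A small sketch registering four of them with exactly the values shown in the dump as defaults, to illustrate the typing (the StrOpt/BoolOpt/IntOpt/ListOpt classes are chosen here to match the logged values; nova's real definitions live in nova.conf.libvirt):

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_opts([
        cfg.StrOpt('virt_type', default='kvm'),
        cfg.BoolOpt('swtpm_enabled', default=False),
        cfg.IntOpt('wait_soft_reboot_seconds', default=120),
        cfg.ListOpt('uid_maps', default=[]),
    ], group='libvirt')
    conf(args=[])

    # Typed access, matching the values logged above.
    assert conf.libvirt.virt_type == 'kvm'
    assert conf.libvirt.swtpm_enabled is False
    assert conf.libvirt.wait_soft_reboot_seconds == 120
    assert conf.libvirt.uid_maps == []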
[None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.certfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.479847] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.collect_timing = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.480014] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.connect_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.480183] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.connect_retry_delay = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.480383] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.default_floating_pool = public {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.480549] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.endpoint_override = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.480712] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.extension_sync_interval = 600 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.480876] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.http_retries = 3 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.481049] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.insecure = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.481258] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.keyfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.481437] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.max_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.481611] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.481772] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.min_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.481941] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.ovs_bridge = br-int {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.482123] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.physnets = [] {{(pid=62923) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.482296] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.region_name = RegionOne {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.482460] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.retriable_status_codes = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.482630] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.service_metadata_proxy = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.482791] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.service_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.482960] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.service_type = network {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.483138] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.split_loggers = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.483322] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.status_code_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.483491] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.status_code_retry_delay = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.483651] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.483834] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.483994] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] neutron.version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.484199] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] notifications.bdms_in_notifications = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.484399] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] notifications.default_level = INFO {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.484578] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] notifications.notification_format = unversioned {{(pid=62923) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.484743] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] notifications.notify_on_state_change = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.484921] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.485111] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] pci.alias = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.485289] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] pci.device_spec = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.485455] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] pci.report_in_placement = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.485628] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.auth_section = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.485799] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.auth_type = password {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.485966] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.486139] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.cafile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.486336] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.certfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.486510] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.collect_timing = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.486669] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.connect_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.486828] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.connect_retry_delay = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.486985] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.default_domain_id = None {{(pid=62923) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.487157] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.default_domain_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.487345] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.domain_id = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.487512] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.domain_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.487671] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.endpoint_override = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.487831] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.insecure = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.487988] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.keyfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.488158] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.max_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.488315] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.min_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.488801] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.password = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.488801] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.project_domain_id = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.488801] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.project_domain_name = Default {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.488951] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.project_id = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.489127] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.project_name = service {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.489324] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.region_name = RegionOne {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.489498] 
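The [placement] credentials logged above (auth_type = password, auth_url = http://10.180.1.21/identity, project_name = service, username = nova, both domains Default, the password masked as ****) are exactly what keystoneauth1's password plugin consumes. A sketch of the kind of session those values describe, with a placeholder where the log redacts the secret:

    from keystoneauth1 import loading, session

    loader = loading.get_plugin_loader('password')
    auth = loader.load_from_options(
        auth_url='http://10.180.1.21/identity',
        username='nova',
        password='REDACTED',            # '****' in the log; placeholder here
        project_name='service',
        project_domain_name='Default',
        user_domain_name='Default',
    )
    sess = session.Session(auth=auth)
    # sess can then back a ksa Adapter with service_type='placement' and
    # valid_interfaces=['internal', 'public'], matching the records above.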
env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.retriable_status_codes = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.489661] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.service_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.489830] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.service_type = placement {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.489991] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.split_loggers = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.490163] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.status_code_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.490348] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.status_code_retry_delay = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.490514] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.system_scope = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.490673] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.490829] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.trust_id = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.490984] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.user_domain_id = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.491199] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.user_domain_name = Default {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.491367] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.user_id = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.491542] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.username = nova {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.491723] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.491884] env[62923]: DEBUG oslo_service.service [None 
req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] placement.version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.492072] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] quota.cores = 20 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.492269] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] quota.count_usage_from_placement = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.492445] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.492621] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] quota.injected_file_content_bytes = 10240 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.492786] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] quota.injected_file_path_length = 255 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.492951] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] quota.injected_files = 5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.493129] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] quota.instances = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.493321] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] quota.key_pairs = 100 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.493504] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] quota.metadata_items = 128 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.493672] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] quota.ram = 51200 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.493837] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] quota.recheck_quota = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.494013] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] quota.server_group_members = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.494188] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] quota.server_groups = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.494363] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62923) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.494529] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.494691] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] scheduler.image_metadata_prefilter = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.494852] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.495025] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] scheduler.max_attempts = 3 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.495213] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] scheduler.max_placement_results = 1000 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.495389] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.495551] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] scheduler.query_placement_for_image_type_support = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.495713] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.495886] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] scheduler.workers = 2 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.496072] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.496271] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.496472] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.496646] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.496815] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.496981] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.497162] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.497356] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.497525] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.host_subset_size = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.497689] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.497849] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.498026] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.498231] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.isolated_hosts = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.498420] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.isolated_images = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.498597] env[62923]: DEBUG oslo_service.service [None 
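filter_scheduler.enabled_filters and weight_classes, logged just above, are plain list options; the short filter class names are resolved when the scheduler starts against what available_filters exposes ('nova.scheduler.filters.all_filters' here). A sketch of the option itself, using the value from the dump as the sketch's default (this deployment adds SameHostFilter and DifferentHostFilter on top of nova's stock list):

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_opts([
        cfg.ListOpt('enabled_filters',
                    default=['ComputeFilter', 'ComputeCapabilitiesFilter',
                             'ImagePropertiesFilter',
                             'ServerGroupAntiAffinityFilter',
                             'ServerGroupAffinityFilter',
                             'SameHostFilter', 'DifferentHostFilter']),
    ], group='filter_scheduler')
    conf(args=[])

    assert 'ComputeFilter' in conf.filter_scheduler.enabled_filters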
req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.498764] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.498930] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.499112] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.pci_in_placement = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.499304] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.499492] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.499662] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.499823] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.499990] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.500170] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.500334] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.track_instance_changes = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.500512] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.500683] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] metrics.required = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.500846] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] metrics.weight_multiplier = 1.0 
{{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.501018] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.501209] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] metrics.weight_setting = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.501533] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.501707] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] serial_console.enabled = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.501884] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] serial_console.port_range = 10000:20000 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.502070] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.502270] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.502448] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] serial_console.serialproxy_port = 6083 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.502617] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] service_user.auth_section = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.502792] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] service_user.auth_type = password {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.502955] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] service_user.cafile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.503132] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] service_user.certfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.503297] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] service_user.collect_timing = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.503460] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] service_user.insecure = False {{(pid=62923) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.503618] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] service_user.keyfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.503790] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] service_user.send_service_user_token = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.503953] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] service_user.split_loggers = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.504140] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] service_user.timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.504316] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] spice.agent_enabled = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.504479] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] spice.enabled = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.504792] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.504986] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.505188] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] spice.html5proxy_port = 6082 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.505366] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] spice.image_compression = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.505529] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] spice.jpeg_compression = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.505687] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] spice.playback_compression = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.505847] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] spice.require_secure = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.506032] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] spice.server_listen = 127.0.0.1 {{(pid=62923) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.506209] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.506371] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] spice.streaming_mode = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.506530] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] spice.zlib_compression = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.506696] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] upgrade_levels.baseapi = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.506868] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] upgrade_levels.compute = auto {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.507049] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] upgrade_levels.conductor = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.507213] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] upgrade_levels.scheduler = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.507380] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vendordata_dynamic_auth.auth_section = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.507541] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vendordata_dynamic_auth.auth_type = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.507699] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vendordata_dynamic_auth.cafile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.507856] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vendordata_dynamic_auth.certfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.508036] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.508234] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vendordata_dynamic_auth.insecure = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.508410] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vendordata_dynamic_auth.keyfile = None {{(pid=62923) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.508576] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.508734] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vendordata_dynamic_auth.timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.508908] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.api_retry_count = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.509081] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.ca_file = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.509258] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.cache_prefix = devstack-image-cache {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.509430] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.cluster_name = testcl1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.509595] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.connection_pool_size = 10 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.509755] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.console_delay_seconds = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.509921] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.datastore_regex = ^datastore.* {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.510142] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.510318] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.host_password = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.510486] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.host_port = 443 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.510654] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.host_username = administrator@vsphere.local {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.510824] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.insecure = True {{(pid=62923) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.510987] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.integration_bridge = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.511196] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.maximum_objects = 100 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.511380] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.pbm_default_policy = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.511546] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.pbm_enabled = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.511708] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.pbm_wsdl_location = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.511879] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.512052] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.serial_port_proxy_uri = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.512222] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.serial_port_service_uri = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.512390] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.task_poll_interval = 0.5 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.512560] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.use_linked_clone = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.512730] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.vnc_keymap = en-us {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.512897] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.vnc_port = 5900 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.513070] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vmware.vnc_port_total = 10000 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.513262] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vnc.auth_schemes = ['none'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.513440] 
env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vnc.enabled = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.513730] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.513916] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.514099] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vnc.novncproxy_port = 6080 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.514300] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vnc.server_listen = 127.0.0.1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.514486] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.514650] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vnc.vencrypt_ca_certs = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.514810] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vnc.vencrypt_client_cert = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.514970] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vnc.vencrypt_client_key = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.515167] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.515339] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.disable_deep_image_inspection = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.515497] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.515657] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.515818] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62923) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.515978] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.disable_rootwrap = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.516154] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.enable_numa_live_migration = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.516318] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.516478] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.516638] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.516799] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.libvirt_disable_apic = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.516958] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.517134] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.517319] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.517492] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.517653] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.517811] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.517971] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.518145] 
env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.518308] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.518472] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.518655] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.518822] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] wsgi.client_socket_timeout = 900 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.518988] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] wsgi.default_pool_size = 1000 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.519166] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] wsgi.keep_alive = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.519332] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] wsgi.max_header_line = 16384 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.519495] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] wsgi.secure_proxy_ssl_header = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.519653] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] wsgi.ssl_ca_file = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.519811] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] wsgi.ssl_cert_file = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.519968] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] wsgi.ssl_key_file = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.520144] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] wsgi.tcp_keepidle = 600 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.520353] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62923) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.520525] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] zvm.ca_file = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.520685] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] zvm.cloud_connector_url = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.520972] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.521199] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] zvm.reachable_timeout = 300 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.521429] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_policy.enforce_new_defaults = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.521830] env[62923]: WARNING oslo_config.cfg [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
[ 477.522026] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_policy.enforce_scope = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.522222] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_policy.policy_default_rule = default {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.522487] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.522682] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_policy.policy_file = policy.yaml {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.522858] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.523034] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.523203] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.523369] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.523535] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.523743] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.523944] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.524139] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler.connection_string = messaging:// {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.524324] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler.enabled = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.524529] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler.es_doc_type = notification 
{{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.524704] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler.es_scroll_size = 10000 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.524906] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler.es_scroll_time = 2m {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.525115] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler.filter_error_trace = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.525309] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler.hmac_keys = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.525538] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler.sentinel_service_name = mymaster {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.525721] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler.socket_timeout = 0.1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.525887] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler.trace_requests = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.526061] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler.trace_sqlalchemy = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.526248] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler_jaeger.process_tags = {} {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.526414] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler_jaeger.service_name_prefix = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.526579] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] profiler_otlp.service_name_prefix = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.526744] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] remote_debug.host = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.526905] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] remote_debug.port = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.527098] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62923) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.527264] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.527429] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.527592] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.527751] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.527909] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.528102] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.528303] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.528502] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.528682] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.528846] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.529028] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.529202] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.529376] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.529545] 
env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.529711] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.529871] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.530055] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.530222] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.530387] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.530552] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.530716] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.530878] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.531052] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.531242] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.531431] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.531615] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.531779] env[62923]: DEBUG oslo_service.service [None 
req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.531947] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.532127] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.ssl = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.532306] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.532477] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.532640] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.532809] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.532976] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.ssl_version = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.533152] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.533344] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.533538] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_notifications.retry = -1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.533737] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.533920] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_messaging_notifications.transport_url = **** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.534107] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.auth_section = None {{(pid=62923) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.534275] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.auth_type = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.534437] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.cafile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.534594] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.certfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.534754] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.collect_timing = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.534912] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.connect_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.535081] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.connect_retry_delay = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.535242] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.endpoint_id = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.535406] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.endpoint_override = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.535561] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.insecure = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.535717] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.keyfile = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.535871] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.max_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.536035] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.min_version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.536195] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.region_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.536356] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.retriable_status_codes = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.536535] 
env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.service_name = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.536707] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.service_type = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.536869] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.split_loggers = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.537034] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.status_code_retries = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.537195] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.status_code_retry_delay = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.537353] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.timeout = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.537509] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.valid_interfaces = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.537665] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_limit.version = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.537829] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_reports.file_event_handler = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.537992] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.538166] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] oslo_reports.log_dir = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.538337] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.538497] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.538657] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.538823] 
env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.538989] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.539163] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.539336] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.539512] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plug_ovs_privileged.group = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.539685] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.539853] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.540024] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.540191] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] vif_plug_ovs_privileged.user = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.540364] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_linux_bridge.flat_interface = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.540543] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.540716] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.540889] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.541073] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.541277] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.541454] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.541619] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.541798] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.541967] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_ovs.isolate_vif = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.542171] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.542361] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.542532] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.542707] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_ovs.ovsdb_interface = native {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.542871] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] os_vif_ovs.per_port_bridge = False {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.543053] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] privsep_osbrick.capabilities = [21] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.543220] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] privsep_osbrick.group = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.543381] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] privsep_osbrick.helper_command = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.543549] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.543714] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.543872] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] privsep_osbrick.user = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.544054] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.544219] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] nova_sys_admin.group = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.544379] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] nova_sys_admin.helper_command = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.544542] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.544703] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.544861] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] nova_sys_admin.user = None {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 477.544991] env[62923]: DEBUG oslo_service.service [None req-0ae11975-74cd-4b34-8320-3980f5fdb70a None None] ******************************************************************************** {{(pid=62923) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 477.545509] env[62923]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 478.048829] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Getting list of instances from cluster (obj){ [ 478.048829] env[62923]: value = "domain-c8" [ 478.048829] env[62923]: _type = "ClusterComputeResource" [ 478.048829] env[62923]: } {{(pid=62923) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 478.050062] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-602ddaf5-29bb-4697-b4d1-29d0d22143ee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 478.059095] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Got total of 0 instances {{(pid=62923) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 478.059613] env[62923]: WARNING nova.virt.vmwareapi.driver [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 478.060116] env[62923]: INFO nova.virt.node [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Generated node identity a513b783-544c-421b-85ec-cfd6d6ee698d [ 478.060351] env[62923]: INFO nova.virt.node [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Wrote node identity a513b783-544c-421b-85ec-cfd6d6ee698d to /opt/stack/data/n-cpu-1/compute_id [ 478.563575] env[62923]: WARNING nova.compute.manager [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Compute nodes ['a513b783-544c-421b-85ec-cfd6d6ee698d'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 479.569188] env[62923]: INFO nova.compute.manager [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 480.575160] env[62923]: WARNING nova.compute.manager [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 480.575613] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 480.575697] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 480.575785] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 480.575940] env[62923]: DEBUG nova.compute.resource_tracker [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62923) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 480.576882] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24fd54c7-bce4-488b-b656-4c79dbe129e3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 480.585485] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f873a5-0925-4596-b87c-590c3a2fee30 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 480.598804] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9c5bc6c0-4498-4785-a5a8-b3e25a11f845 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 480.604969] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee8becc-a1bf-42b6-ba72-b86236ef91e9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 480.634214] env[62923]: DEBUG nova.compute.resource_tracker [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181499MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62923) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 480.634346] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 480.634536] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 481.137515] env[62923]: WARNING nova.compute.resource_tracker [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] No compute node record for cpu-1:a513b783-544c-421b-85ec-cfd6d6ee698d: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host a513b783-544c-421b-85ec-cfd6d6ee698d could not be found. [ 481.640913] env[62923]: INFO nova.compute.resource_tracker [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: a513b783-544c-421b-85ec-cfd6d6ee698d [ 483.149224] env[62923]: DEBUG nova.compute.resource_tracker [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 483.149585] env[62923]: DEBUG nova.compute.resource_tracker [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 483.299781] env[62923]: INFO nova.scheduler.client.report [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] [req-2d8aa1b8-55e3-461f-9020-8a180ebb950a] Created resource provider record via placement API for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 483.315840] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ae00df-b1df-4c33-b595-cbfa7443676a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 483.323397] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2557ab91-5bbc-46c3-b6d3-7decfb3191dc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 483.352683] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44546a73-f7bc-4ccc-90e6-4cfe2d06ce55 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 483.359538] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ee777d-2bf8-480c-9e08-62b073f0ad58 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 483.371883] env[62923]: DEBUG nova.compute.provider_tree [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 483.908866] env[62923]: DEBUG nova.scheduler.client.report [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 483.909121] env[62923]: DEBUG nova.compute.provider_tree [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 0 to 1 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 483.909278] env[62923]: DEBUG nova.compute.provider_tree [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 483.958405] env[62923]: DEBUG nova.compute.provider_tree [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Updating 
resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 1 to 2 during operation: update_traits {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 484.463343] env[62923]: DEBUG nova.compute.resource_tracker [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 484.463698] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.829s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 484.463800] env[62923]: DEBUG nova.service [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Creating RPC server for service compute {{(pid=62923) start /opt/stack/nova/nova/service.py:186}} [ 484.478996] env[62923]: DEBUG nova.service [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] Join ServiceGroup membership for this service compute {{(pid=62923) start /opt/stack/nova/nova/service.py:203}} [ 484.479213] env[62923]: DEBUG nova.servicegroup.drivers.db [None req-2093ff00-b1ae-438c-bdac-1606d9e518b0 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62923) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 484.479675] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 484.982745] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Getting list of instances from cluster (obj){ [ 484.982745] env[62923]: value = "domain-c8" [ 484.982745] env[62923]: _type = "ClusterComputeResource" [ 484.982745] env[62923]: } {{(pid=62923) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 484.983672] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c18dee1-3475-49e2-a5d2-6b99b8862f98 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 484.992081] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Got total of 0 instances {{(pid=62923) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 484.992331] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 484.992623] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Getting list of instances from cluster (obj){ [ 484.992623] env[62923]: value = "domain-c8" [ 484.992623] env[62923]: _type = "ClusterComputeResource" [ 484.992623] env[62923]: } {{(pid=62923) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 484.993458] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4523d8e6-c4f0-4a0c-9685-1cbf31a7c8b1 
{{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 485.000427] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Got total of 0 instances {{(pid=62923) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 521.768618] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Acquiring lock "04f788bb-19ea-456d-93eb-7398f5dbee35" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.768899] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Lock "04f788bb-19ea-456d-93eb-7398f5dbee35" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 522.271976] env[62923]: DEBUG nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 522.811299] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 522.811732] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 522.814161] env[62923]: INFO nova.compute.claims [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 523.310438] env[62923]: DEBUG oslo_concurrency.lockutils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquiring lock "a33da17c-bbb2-4307-b4b3-56cec5cb757e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.310438] env[62923]: DEBUG oslo_concurrency.lockutils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Lock "a33da17c-bbb2-4307-b4b3-56cec5cb757e" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.814192] env[62923]: DEBUG nova.compute.manager [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 523.899382] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa9490a-a191-4f98-989c-c8f08e883917 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.908869] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32655611-26b7-46ce-be58-554e6299a3c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.950705] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3330fa2-c00e-4ef4-881c-3caabef0d463 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.959555] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63290f7e-0bc2-453d-b317-4402966fd660 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.974807] env[62923]: DEBUG nova.compute.provider_tree [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 524.169349] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Acquiring lock "6ca62d1b-9533-4b83-8e8a-7f62a34c90a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.169349] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Lock "6ca62d1b-9533-4b83-8e8a-7f62a34c90a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.349346] env[62923]: DEBUG oslo_concurrency.lockutils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.478831] env[62923]: DEBUG nova.scheduler.client.report [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 
tempest-ImagesOneServerTestJSON-993215476-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 524.672553] env[62923]: DEBUG nova.compute.manager [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 524.859687] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquiring lock "795b645d-3aee-4dd8-9537-2277f86c5b10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.860013] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Lock "795b645d-3aee-4dd8-9537-2277f86c5b10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.984081] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.172s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 524.985014] env[62923]: DEBUG nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 524.991748] env[62923]: DEBUG oslo_concurrency.lockutils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.641s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.993601] env[62923]: INFO nova.compute.claims [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 525.215806] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.290605] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Acquiring lock "f81af398-7382-4433-9a24-07d16fd1223b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.290854] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Lock "f81af398-7382-4433-9a24-07d16fd1223b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.362382] env[62923]: DEBUG nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 525.502600] env[62923]: DEBUG nova.compute.utils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 525.504967] env[62923]: DEBUG nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 525.505296] env[62923]: DEBUG nova.network.neutron [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 525.722849] env[62923]: DEBUG nova.policy [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd7f03d4480941498d67ffada7998f8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'faf7bd9d689d4fb7a873910f0cc42bf2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 525.796194] env[62923]: DEBUG nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 525.898335] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.012773] env[62923]: DEBUG nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 526.136399] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1428d89-83fc-4812-ae65-6e98103bd904 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.146690] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d64d00e-8712-4439-8fa2-987fa0b200a8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.186664] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d37827-f361-489d-ab57-153c4af1ffad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.192609] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9669a4a-9fc8-447d-aac6-9769882587ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.217263] env[62923]: DEBUG nova.compute.provider_tree [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 526.325695] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.685223] env[62923]: DEBUG nova.network.neutron [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Successfully created port: 53416306-bdfd-497d-bae2-1d6d08d01e4c {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 526.720356] env[62923]: DEBUG nova.scheduler.client.report [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 527.028023] env[62923]: DEBUG nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 527.081976] env[62923]: DEBUG nova.virt.hardware [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 527.082412] env[62923]: DEBUG nova.virt.hardware [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 527.082687] env[62923]: DEBUG nova.virt.hardware [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 527.083032] env[62923]: DEBUG nova.virt.hardware [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 527.083210] env[62923]: DEBUG nova.virt.hardware [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 527.083414] env[62923]: DEBUG nova.virt.hardware [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 527.084028] env[62923]: DEBUG nova.virt.hardware [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 527.084028] env[62923]: DEBUG nova.virt.hardware [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 527.084563] env[62923]: DEBUG nova.virt.hardware [None 
req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 527.084782] env[62923]: DEBUG nova.virt.hardware [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 527.085077] env[62923]: DEBUG nova.virt.hardware [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 527.086342] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370ab4b7-6f8a-4ae7-8532-574b375079ee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.098396] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fac1580-d365-49be-9360-8d763fa9a013 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.123838] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d49adaa-1db8-4935-9c71-3faec543b31b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.227908] env[62923]: DEBUG oslo_concurrency.lockutils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.236s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.227908] env[62923]: DEBUG nova.compute.manager [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 527.233618] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.019s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.236255] env[62923]: INFO nova.compute.claims [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 527.735879] env[62923]: DEBUG nova.compute.utils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 527.738724] env[62923]: DEBUG nova.compute.manager [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Not allocating networking since 'none' was specified. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 528.239475] env[62923]: DEBUG nova.compute.manager [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 528.355941] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2383db3d-5e40-46f0-87a9-1bbe6739477a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.370510] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a33369-7cc5-4ea4-b289-53e0e4052d57 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.410423] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95819d6c-a257-48af-ad5f-da4839fd3448 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.418275] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e949e2bd-b4e8-4cd2-8231-1373e4809254 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.432187] env[62923]: DEBUG nova.compute.provider_tree [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 528.935618] env[62923]: DEBUG nova.scheduler.client.report [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 529.256418] env[62923]: DEBUG nova.compute.manager [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 529.288999] env[62923]: DEBUG nova.virt.hardware [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 529.289219] env[62923]: DEBUG nova.virt.hardware [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 529.289372] env[62923]: DEBUG nova.virt.hardware [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 529.289544] env[62923]: DEBUG nova.virt.hardware [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 529.289684] env[62923]: DEBUG nova.virt.hardware [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 529.289857] env[62923]: DEBUG nova.virt.hardware [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 529.290609] env[62923]: DEBUG nova.virt.hardware [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 529.290710] env[62923]: DEBUG nova.virt.hardware [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 529.290944] env[62923]: DEBUG nova.virt.hardware [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 
tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 529.291180] env[62923]: DEBUG nova.virt.hardware [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 529.291678] env[62923]: DEBUG nova.virt.hardware [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 529.293856] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37679ee-4b39-4079-b4eb-1a218ad0416f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.304502] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8c7cc9-c2e3-4286-8218-afc7591ff933 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.322823] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Instance VIF info [] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 529.333628] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 529.333951] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e9336124-0927-4308-a088-4499d1233373 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.346122] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Created folder: OpenStack in parent group-v4. [ 529.346322] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Creating folder: Project (b1d0d7091e524fa8833286b31182f8c6). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 529.346879] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eea21098-078b-46fa-b9d5-7c4ad00b98e2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.356611] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Created folder: Project (b1d0d7091e524fa8833286b31182f8c6) in parent group-v291405. 
[ 529.356808] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Creating folder: Instances. Parent ref: group-v291406. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 529.359525] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a00d4c90-cd76-4862-ad28-f3c57498c13b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.368936] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Created folder: Instances in parent group-v291406. [ 529.369212] env[62923]: DEBUG oslo.service.loopingcall [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 529.369398] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 529.369632] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1fef7586-0d9e-4a10-8da7-29da1670efd5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.388471] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 529.388471] env[62923]: value = "task-1369813" [ 529.388471] env[62923]: _type = "Task" [ 529.388471] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.398310] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369813, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.446507] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.211s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 529.446507] env[62923]: DEBUG nova.compute.manager [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 529.449461] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.551s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.451227] env[62923]: INFO nova.compute.claims [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 529.746531] env[62923]: ERROR nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 53416306-bdfd-497d-bae2-1d6d08d01e4c, please check neutron logs for more information. [ 529.746531] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 529.746531] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.746531] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 529.746531] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 529.746531] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 529.746531] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 529.746531] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 529.746531] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.746531] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 529.746531] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.746531] env[62923]: ERROR nova.compute.manager raise self.value [ 529.746531] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 529.746531] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 529.746531] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.746531] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 529.747109] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.747109] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 529.747109] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 53416306-bdfd-497d-bae2-1d6d08d01e4c, please check neutron logs for more information. 
[ 529.747109] env[62923]: ERROR nova.compute.manager [ 529.748775] env[62923]: Traceback (most recent call last): [ 529.748854] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 529.748854] env[62923]: listener.cb(fileno) [ 529.748854] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.748854] env[62923]: result = function(*args, **kwargs) [ 529.748854] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 529.748854] env[62923]: return func(*args, **kwargs) [ 529.748854] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 529.748854] env[62923]: raise e [ 529.748854] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.748854] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 529.748854] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 529.748854] env[62923]: created_port_ids = self._update_ports_for_instance( [ 529.748854] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 529.748854] env[62923]: with excutils.save_and_reraise_exception(): [ 529.748854] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.748854] env[62923]: self.force_reraise() [ 529.748854] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.748854] env[62923]: raise self.value [ 529.748854] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 529.748854] env[62923]: updated_port = self._update_port( [ 529.748854] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.748854] env[62923]: _ensure_no_port_binding_failure(port) [ 529.748854] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.748854] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 529.748854] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 53416306-bdfd-497d-bae2-1d6d08d01e4c, please check neutron logs for more information. [ 529.748854] env[62923]: Removing descriptor: 15 [ 529.751039] env[62923]: ERROR nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 53416306-bdfd-497d-bae2-1d6d08d01e4c, please check neutron logs for more information. 
[ 529.751039] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Traceback (most recent call last): [ 529.751039] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 529.751039] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] yield resources [ 529.751039] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 529.751039] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] self.driver.spawn(context, instance, image_meta, [ 529.751039] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 529.751039] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] self._vmops.spawn(context, instance, image_meta, injected_files, [ 529.751039] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 529.751039] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] vm_ref = self.build_virtual_machine(instance, [ 529.751039] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 529.751401] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] vif_infos = vmwarevif.get_vif_info(self._session, [ 529.751401] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 529.751401] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] for vif in network_info: [ 529.751401] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 529.751401] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] return self._sync_wrapper(fn, *args, **kwargs) [ 529.751401] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 529.751401] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] self.wait() [ 529.751401] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 529.751401] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] self[:] = self._gt.wait() [ 529.751401] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 529.751401] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] return self._exit_event.wait() [ 529.751401] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 529.751401] env[62923]: ERROR 
nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] result = hub.switch() [ 529.751741] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 529.751741] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] return self.greenlet.switch() [ 529.751741] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.751741] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] result = function(*args, **kwargs) [ 529.751741] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 529.751741] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] return func(*args, **kwargs) [ 529.751741] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 529.751741] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] raise e [ 529.751741] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.751741] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] nwinfo = self.network_api.allocate_for_instance( [ 529.751741] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 529.751741] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] created_port_ids = self._update_ports_for_instance( [ 529.751741] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 529.752161] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] with excutils.save_and_reraise_exception(): [ 529.752161] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.752161] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] self.force_reraise() [ 529.752161] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.752161] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] raise self.value [ 529.752161] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 529.752161] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] updated_port = self._update_port( [ 529.752161] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.752161] 
env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] _ensure_no_port_binding_failure(port) [ 529.752161] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.752161] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] raise exception.PortBindingFailed(port_id=port['id']) [ 529.752161] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] nova.exception.PortBindingFailed: Binding failed for port 53416306-bdfd-497d-bae2-1d6d08d01e4c, please check neutron logs for more information. [ 529.752161] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] [ 529.752502] env[62923]: INFO nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Terminating instance [ 529.755227] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Acquiring lock "refresh_cache-04f788bb-19ea-456d-93eb-7398f5dbee35" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.755227] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Acquired lock "refresh_cache-04f788bb-19ea-456d-93eb-7398f5dbee35" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.755227] env[62923]: DEBUG nova.network.neutron [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 529.900636] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369813, 'name': CreateVM_Task, 'duration_secs': 0.297016} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 529.901112] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 529.902675] env[62923]: DEBUG oslo_vmware.service [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237f8c78-1f4c-4097-a925-503c38a97382 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.909758] env[62923]: DEBUG oslo_concurrency.lockutils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.911672] env[62923]: DEBUG oslo_concurrency.lockutils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.911760] env[62923]: DEBUG oslo_concurrency.lockutils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 529.912242] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18be82de-8e15-4d1f-a067-195b1c25bbf7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.919463] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 529.919463] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5234ed56-5789-0ac5-3e9f-c922e57be642" [ 529.919463] env[62923]: _type = "Task" [ 529.919463] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.933021] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5234ed56-5789-0ac5-3e9f-c922e57be642, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.957044] env[62923]: DEBUG nova.compute.utils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 529.962146] env[62923]: DEBUG nova.compute.manager [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Not allocating networking since 'none' was specified. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 530.289974] env[62923]: DEBUG nova.network.neutron [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 530.392021] env[62923]: DEBUG nova.network.neutron [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.434712] env[62923]: DEBUG oslo_concurrency.lockutils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.434712] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 530.436473] env[62923]: DEBUG oslo_concurrency.lockutils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 530.437127] env[62923]: DEBUG oslo_concurrency.lockutils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 530.437127] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 530.438577] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2dd964d-ec7a-4003-9eaa-0b0c76238736 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.457733] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 530.460041] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 530.460041] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54d8e0d-0426-4c39-91fb-7a94e4bd81e9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.463796] env[62923]: DEBUG nova.compute.manager [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 530.478699] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e06e73f9-3052-4881-ac29-c8bbe90c38ff {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.486370] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 530.486370] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52712b80-12a8-ac57-9f35-ac3f8e3fed96" [ 530.486370] env[62923]: _type = "Task" [ 530.486370] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.501394] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Preparing fetch location {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 530.501775] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Creating directory with path [datastore1] vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed/cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 530.504686] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e18185f-3507-4aa6-81ce-e620a3389595 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.528766] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Created directory with path [datastore1] vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed/cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 530.528766] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Fetch image to [datastore1] vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 530.528766] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Downloading image file data cd84cf13-77b9-4bc1-bb15-31bece605a8e to [datastore1] vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk on the data store datastore1 {{(pid=62923) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 530.529563] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c67b85-f005-4f09-bc66-ed6bb071790c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.543357] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f51cb2-8c2c-46b7-bfa5-e990e5dc00b1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.562062] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b290afa-7be0-4816-b40b-29402b036ae1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.603070] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96978add-8949-49fe-aa2a-736989e3e0d1 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.610253] env[62923]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-85c035cf-905c-4868-bdd9-1a03aaa37e22 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.633691] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8203b047-c11d-40e8-845d-eaa44faf15dc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.640982] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Downloading image file data cd84cf13-77b9-4bc1-bb15-31bece605a8e to the data store datastore1 {{(pid=62923) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 530.650893] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6888ff44-2aa8-4e05-9bd1-362b5f3715c5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.682608] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73875968-9285-445c-907b-f09595b17a81 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.690247] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65837d6-abf3-4c85-9fec-dae5e9e004a2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.707027] env[62923]: DEBUG nova.compute.provider_tree [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 530.752912] env[62923]: DEBUG oslo_vmware.rw_handles [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62923) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 530.896367] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Releasing lock "refresh_cache-04f788bb-19ea-456d-93eb-7398f5dbee35" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.896999] env[62923]: DEBUG nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 530.897130] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 530.898642] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79f68558-50f7-4d40-8e11-8c31c04f47e9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.914967] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e9997f-e9cd-4284-92e8-c05b8e074491 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.946273] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 04f788bb-19ea-456d-93eb-7398f5dbee35 could not be found. [ 530.946571] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 530.946935] env[62923]: INFO nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Took 0.05 seconds to destroy the instance on the hypervisor. [ 530.947200] env[62923]: DEBUG oslo.service.loopingcall [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 530.949297] env[62923]: DEBUG nova.compute.manager [-] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 530.949398] env[62923]: DEBUG nova.network.neutron [-] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 530.987881] env[62923]: DEBUG nova.network.neutron [-] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Instance cache missing network info.
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 531.210504] env[62923]: DEBUG nova.scheduler.client.report [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 531.477243] env[62923]: DEBUG nova.compute.manager [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 531.492102] env[62923]: DEBUG nova.network.neutron [-] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.534310] env[62923]: DEBUG nova.virt.hardware [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 531.534536] env[62923]: DEBUG nova.virt.hardware [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 531.534703] env[62923]: DEBUG nova.virt.hardware [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 531.534870] env[62923]: DEBUG nova.virt.hardware [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 531.538288] env[62923]: DEBUG nova.virt.hardware 
[None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 531.538288] env[62923]: DEBUG nova.virt.hardware [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 531.538288] env[62923]: DEBUG nova.virt.hardware [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 531.538288] env[62923]: DEBUG nova.virt.hardware [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 531.538288] env[62923]: DEBUG nova.virt.hardware [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 531.538443] env[62923]: DEBUG nova.virt.hardware [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 531.538554] env[62923]: DEBUG nova.virt.hardware [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 531.539788] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f998736-5b6a-45f4-ae12-b15b896d4cfe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.546712] env[62923]: DEBUG oslo_vmware.rw_handles [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Completed reading data from the image iterator. {{(pid=62923) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 531.546920] env[62923]: DEBUG oslo_vmware.rw_handles [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62923) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 531.553401] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7e2265-246a-4197-987c-f75450d56e18 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.578283] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Instance VIF info [] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 531.584342] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Creating folder: Project (d3f01faee2af42929c030052505138a6). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 531.584955] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7168455b-3c01-404a-ad54-61bede674f09 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.597023] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Created folder: Project (d3f01faee2af42929c030052505138a6) in parent group-v291405. [ 531.597023] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Creating folder: Instances. Parent ref: group-v291409. 
{{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 531.597397] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Downloaded image file data cd84cf13-77b9-4bc1-bb15-31bece605a8e to vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk on the data store datastore1 {{(pid=62923) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 531.598952] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Caching image {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 531.600113] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Copying Virtual Disk [datastore1] vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk to [datastore1] vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 531.600407] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be1a398d-75a4-4807-9b40-c841868a74b9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.602182] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20af57eb-785f-4caf-b75f-50f1df5229a5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.611910] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 531.611910] env[62923]: value = "task-1369816" [ 531.611910] env[62923]: _type = "Task" [ 531.611910] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.616236] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Created folder: Instances in parent group-v291409. [ 531.616236] env[62923]: DEBUG oslo.service.loopingcall [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 531.620293] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 531.620293] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-409e0bdd-f22e-4076-8582-40c98d1cd61f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.636357] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369816, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.636357] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 531.636357] env[62923]: value = "task-1369817" [ 531.636357] env[62923]: _type = "Task" [ 531.636357] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.647094] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369817, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.716932] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.717828] env[62923]: DEBUG nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 531.728032] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.394s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.728032] env[62923]: INFO nova.compute.claims [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 532.001113] env[62923]: INFO nova.compute.manager [-] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Took 1.05 seconds to deallocate network for instance. 
[ 532.007996] env[62923]: DEBUG nova.compute.claims [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 532.008207] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.024023] env[62923]: DEBUG nova.compute.manager [req-774dea15-b7c6-4a71-b40a-f4205ce4d06f req-faa106d0-ec1a-48ff-ad7b-9fb0468efe96 service nova] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Received event network-changed-53416306-bdfd-497d-bae2-1d6d08d01e4c {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 532.024023] env[62923]: DEBUG nova.compute.manager [req-774dea15-b7c6-4a71-b40a-f4205ce4d06f req-faa106d0-ec1a-48ff-ad7b-9fb0468efe96 service nova] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Refreshing instance network info cache due to event network-changed-53416306-bdfd-497d-bae2-1d6d08d01e4c. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 532.024023] env[62923]: DEBUG oslo_concurrency.lockutils [req-774dea15-b7c6-4a71-b40a-f4205ce4d06f req-faa106d0-ec1a-48ff-ad7b-9fb0468efe96 service nova] Acquiring lock "refresh_cache-04f788bb-19ea-456d-93eb-7398f5dbee35" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.024023] env[62923]: DEBUG oslo_concurrency.lockutils [req-774dea15-b7c6-4a71-b40a-f4205ce4d06f req-faa106d0-ec1a-48ff-ad7b-9fb0468efe96 service nova] Acquired lock "refresh_cache-04f788bb-19ea-456d-93eb-7398f5dbee35" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.024023] env[62923]: DEBUG nova.network.neutron [req-774dea15-b7c6-4a71-b40a-f4205ce4d06f req-faa106d0-ec1a-48ff-ad7b-9fb0468efe96 service nova] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Refreshing network info cache for port 53416306-bdfd-497d-bae2-1d6d08d01e4c {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 532.124493] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369816, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.158027] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369817, 'name': CreateVM_Task, 'duration_secs': 0.285585} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.158320] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 532.158771] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.158931] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.159366] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 532.159737] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-555837e1-e1a9-4485-8d7c-6f802eb6295d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.165223] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Waiting for the task: (returnval){ [ 532.165223] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5217cc14-a077-b26f-ea4a-97bf94d92519" [ 532.165223] env[62923]: _type = "Task" [ 532.165223] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.181613] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5217cc14-a077-b26f-ea4a-97bf94d92519, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.238633] env[62923]: DEBUG nova.compute.utils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 532.248256] env[62923]: DEBUG nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 532.248256] env[62923]: DEBUG nova.network.neutron [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 532.275479] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquiring lock "e0ce4383-cade-4d85-a8a5-2437b9203d98" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.275479] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Lock "e0ce4383-cade-4d85-a8a5-2437b9203d98" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.565404] env[62923]: DEBUG nova.policy [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1c2306873fe24d2db075445aeea97ddd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29ba5bda1975408fb2b2f4691089784d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 532.567408] env[62923]: DEBUG nova.network.neutron [req-774dea15-b7c6-4a71-b40a-f4205ce4d06f req-faa106d0-ec1a-48ff-ad7b-9fb0468efe96 service nova] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.630524] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369816, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695536} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.630805] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Copied Virtual Disk [datastore1] vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk to [datastore1] vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 532.630971] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Deleting the datastore file [datastore1] vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 532.631254] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c97ce0a-5db7-436c-b23d-4e2ef3c9ba3f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.642856] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 532.642856] env[62923]: value = "task-1369818" [ 532.642856] env[62923]: _type = "Task" [ 532.642856] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.662655] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369818, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022729} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.662655] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 532.662655] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Moving file from [datastore1] vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed/cd84cf13-77b9-4bc1-bb15-31bece605a8e to [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e. 
{{(pid=62923) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 532.662961] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-99f0cad5-dc88-493c-8348-007734c24324 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.673941] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 532.673941] env[62923]: value = "task-1369819" [ 532.673941] env[62923]: _type = "Task" [ 532.673941] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.681511] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.681750] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 532.681973] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.685479] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369819, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.746337] env[62923]: DEBUG nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 532.781068] env[62923]: DEBUG nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 532.805606] env[62923]: DEBUG nova.network.neutron [req-774dea15-b7c6-4a71-b40a-f4205ce4d06f req-faa106d0-ec1a-48ff-ad7b-9fb0468efe96 service nova] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.895560] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec99e456-3d72-4850-b07b-b296b253eda4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.904858] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d589630-b499-43a5-ae94-bccd31241919 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.936453] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4e9606-5be5-45b8-acab-6a342b786356 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.944323] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2316601-a241-431b-a558-782b1e997de1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.948630] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 532.948698] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 532.948859] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Starting heal instance info cache {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 532.948981] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Rebuilding the list of instances to heal {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 532.959575] env[62923]: DEBUG nova.compute.provider_tree [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 533.193516] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369819, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.02372} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.193956] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] File moved {{(pid=62923) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 533.195316] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Cleaning up location [datastore1] vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 533.195495] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Deleting the datastore file [datastore1] vmware_temp/1f356b38-d2d0-47fa-b1d0-f5833975e9ed {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 533.195775] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7921f705-dace-43fc-aadd-7dab01b311bf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.211274] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 533.211274] env[62923]: value = "task-1369820" [ 533.211274] env[62923]: _type = "Task" [ 533.211274] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 533.230281] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369820, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.248312] env[62923]: DEBUG nova.network.neutron [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Successfully created port: 6bcf7b58-225b-48db-aaef-834cc06a36b1 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 533.318302] env[62923]: DEBUG oslo_concurrency.lockutils [req-774dea15-b7c6-4a71-b40a-f4205ce4d06f req-faa106d0-ec1a-48ff-ad7b-9fb0468efe96 service nova] Releasing lock "refresh_cache-04f788bb-19ea-456d-93eb-7398f5dbee35" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 533.321832] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.388351] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Acquiring lock "3cb4806d-dffa-4c41-9730-f29d2aad059a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.388678] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Lock "3cb4806d-dffa-4c41-9730-f29d2aad059a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.452466] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 533.454907] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 533.454907] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 533.454907] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Skipping network cache update for instance because it is Building. 
{{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 533.454907] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 533.454907] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Didn't find any instances for network info cache update. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 533.454907] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.455270] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.455270] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.455270] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.455811] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.456179] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.456543] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62923) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 533.456941] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 533.462526] env[62923]: DEBUG nova.scheduler.client.report [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 533.726037] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369820, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031421} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.726304] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 533.727231] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f29c85a-de26-4715-b2de-b11b8d1f61cd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.732460] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 533.732460] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52163087-81c7-f8d5-89df-83c8faad7220" [ 533.732460] env[62923]: _type = "Task" [ 533.732460] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 533.740929] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52163087-81c7-f8d5-89df-83c8faad7220, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.758591] env[62923]: DEBUG nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 533.786680] env[62923]: DEBUG nova.virt.hardware [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 533.787049] env[62923]: DEBUG nova.virt.hardware [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 533.787217] env[62923]: DEBUG nova.virt.hardware [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 533.787470] env[62923]: DEBUG nova.virt.hardware [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 533.787529] env[62923]: DEBUG nova.virt.hardware [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 533.787669] env[62923]: DEBUG nova.virt.hardware [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 533.787868] env[62923]: DEBUG nova.virt.hardware [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 533.788062] env[62923]: DEBUG nova.virt.hardware [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 533.788236] env[62923]: DEBUG nova.virt.hardware [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 533.788582] env[62923]: DEBUG nova.virt.hardware [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 533.788582] env[62923]: DEBUG nova.virt.hardware [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 533.789559] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479396dd-406a-430b-8697-46118c3288f4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.797603] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35eb507b-d3b8-4e94-816f-d326bda59532 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.891647] env[62923]: DEBUG nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 533.960129] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.975258] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.255s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.978642] env[62923]: DEBUG nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 533.981674] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 1.973s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.249586] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52163087-81c7-f8d5-89df-83c8faad7220, 'name': SearchDatastore_Task, 'duration_secs': 0.008791} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 534.249586] env[62923]: DEBUG oslo_concurrency.lockutils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.249586] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] a33da17c-bbb2-4307-b4b3-56cec5cb757e/a33da17c-bbb2-4307-b4b3-56cec5cb757e.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 534.249940] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.250073] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 534.251188] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27d2e5e3-6f52-4b95-84c2-598d5b517a6d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.252680] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e84cded-250c-4103-9ffe-8107ebb40b0a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.258730] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 534.258730] env[62923]: value = "task-1369821" [ 534.258730] 
env[62923]: _type = "Task" [ 534.258730] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.265626] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 534.266037] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 534.269253] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c1d79a0-470c-4be4-9462-41b0eef8df58 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.275523] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369821, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.279108] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Waiting for the task: (returnval){ [ 534.279108] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527b6eaf-0c38-9d7f-75b8-fce8d3a3f2d7" [ 534.279108] env[62923]: _type = "Task" [ 534.279108] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.288415] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527b6eaf-0c38-9d7f-75b8-fce8d3a3f2d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.436357] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.486966] env[62923]: DEBUG nova.compute.utils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 534.491672] env[62923]: DEBUG nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 534.491857] env[62923]: DEBUG nova.network.neutron [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 534.575377] env[62923]: DEBUG nova.policy [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '30cff0dac4ab46b2be3f470af5674532', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc278af083dd473fb97648030669de4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 534.656339] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf1e624-0dae-42c7-afa5-ab6c9a83aebd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.668046] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e73348c-b730-4072-a179-e841a673d312 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.679508] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Acquiring lock "4fe36f05-d730-4fb1-ab05-0425be619dfb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.679841] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Lock "4fe36f05-d730-4fb1-ab05-0425be619dfb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.709765] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faff7d2d-930f-4d30-8892-20270e26c0b3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.721489] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5c124f-a360-410d-a28f-970172459a77 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.740852] env[62923]: DEBUG nova.compute.provider_tree [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.755953] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquiring lock "04d96512-dc09-42ff-96d0-961f7359318c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.756236] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Lock "04d96512-dc09-42ff-96d0-961f7359318c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.769569] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369821, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.790144] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527b6eaf-0c38-9d7f-75b8-fce8d3a3f2d7, 'name': SearchDatastore_Task, 'duration_secs': 0.016255} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 534.791174] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10f524c4-1cab-4814-b62e-17148a30dd1e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.796701] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Waiting for the task: (returnval){ [ 534.796701] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5256e734-dfc9-c415-d39e-3af47a877956" [ 534.796701] env[62923]: _type = "Task" [ 534.796701] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.807054] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5256e734-dfc9-c415-d39e-3af47a877956, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.998214] env[62923]: DEBUG nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 535.209492] env[62923]: DEBUG nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 535.242996] env[62923]: DEBUG nova.scheduler.client.report [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 535.259362] env[62923]: DEBUG nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 535.269613] env[62923]: DEBUG nova.network.neutron [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Successfully created port: a44148b3-774a-48ee-acac-8d757051c452 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 535.280626] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369821, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51686} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.283786] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] a33da17c-bbb2-4307-b4b3-56cec5cb757e/a33da17c-bbb2-4307-b4b3-56cec5cb757e.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 535.284118] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 535.284425] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fd74bbb8-c102-490e-b6a3-768410fd2357 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.291827] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 535.291827] env[62923]: value = "task-1369822" [ 535.291827] env[62923]: _type = "Task" [ 535.291827] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 535.305415] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369822, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 535.311920] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5256e734-dfc9-c415-d39e-3af47a877956, 'name': SearchDatastore_Task, 'duration_secs': 0.049071} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.312268] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.312517] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3/6ca62d1b-9533-4b83-8e8a-7f62a34c90a3.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 535.312766] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e3c50da-3f6f-4697-b3fe-1a5ba0010c4f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.321356] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Waiting for the task: (returnval){ [ 535.321356] env[62923]: value = "task-1369823" [ 535.321356] env[62923]: _type = "Task" [ 535.321356] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 535.329802] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369823, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 535.744914] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.752895] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.770s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.752895] env[62923]: ERROR nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 53416306-bdfd-497d-bae2-1d6d08d01e4c, please check neutron logs for more information. 
[ 535.752895] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Traceback (most recent call last):
[ 535.752895] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 535.752895] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] self.driver.spawn(context, instance, image_meta,
[ 535.752895] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 535.752895] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 535.752895] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 535.752895] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] vm_ref = self.build_virtual_machine(instance,
[ 535.753255] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 535.753255] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] vif_infos = vmwarevif.get_vif_info(self._session,
[ 535.753255] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 535.753255] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] for vif in network_info:
[ 535.753255] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 535.753255] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] return self._sync_wrapper(fn, *args, **kwargs)
[ 535.753255] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 535.753255] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] self.wait()
[ 535.753255] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 535.753255] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] self[:] = self._gt.wait()
[ 535.753255] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 535.753255] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] return self._exit_event.wait()
[ 535.753255] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 535.753570] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] result = hub.switch()
[ 535.753570] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 535.753570] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] return self.greenlet.switch()
[ 535.753570] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 535.753570] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] result = function(*args, **kwargs)
[ 535.753570] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 535.753570] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] return func(*args, **kwargs)
[ 535.753570] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 535.753570] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] raise e
[ 535.753570] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 535.753570] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] nwinfo = self.network_api.allocate_for_instance(
[ 535.753570] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 535.753570] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] created_port_ids = self._update_ports_for_instance(
[ 535.753882] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 535.753882] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] with excutils.save_and_reraise_exception():
[ 535.753882] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 535.753882] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] self.force_reraise()
[ 535.753882] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 535.753882] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] raise self.value
[ 535.753882] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 535.753882] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] updated_port = self._update_port(
[ 535.753882] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 535.753882] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] _ensure_no_port_binding_failure(port)
[ 535.753882] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 535.753882] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] raise exception.PortBindingFailed(port_id=port['id'])
[ 535.754181] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] nova.exception.PortBindingFailed: Binding failed for port 53416306-bdfd-497d-bae2-1d6d08d01e4c, please check neutron logs for more information.
[ 535.754181] env[62923]: ERROR nova.compute.manager [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35]
[ 535.754181] env[62923]: DEBUG nova.compute.utils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Binding failed for port 53416306-bdfd-497d-bae2-1d6d08d01e4c, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 535.754837] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.434s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 535.760532] env[62923]: INFO nova.compute.claims [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 535.766420] env[62923]: DEBUG nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Build of instance 04f788bb-19ea-456d-93eb-7398f5dbee35 was re-scheduled: Binding failed for port 53416306-bdfd-497d-bae2-1d6d08d01e4c, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
[ 535.767812] env[62923]: DEBUG nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 535.769276] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Acquiring lock "refresh_cache-04f788bb-19ea-456d-93eb-7398f5dbee35" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 535.771536] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Acquired lock "refresh_cache-04f788bb-19ea-456d-93eb-7398f5dbee35" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 535.771536] env[62923]: DEBUG nova.network.neutron [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 535.806668] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 535.817664] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369822, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064949} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 535.818709] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 535.820742] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65167155-2509-44db-9880-8f301baeea6f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 535.863986] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] a33da17c-bbb2-4307-b4b3-56cec5cb757e/a33da17c-bbb2-4307-b4b3-56cec5cb757e.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 535.869305] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a26822e-01b6-4397-a927-24899b040795 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 535.884070] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369823, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 535.890554] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){
[ 535.890554] env[62923]: value = "task-1369824"
[ 535.890554] env[62923]: _type = "Task"
[ 535.890554] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 535.904783] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369824, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 536.007956] env[62923]: DEBUG nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
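The task records above ("Waiting for the task", "progress is N%.", "completed successfully.") come from oslo.vmware's wait_for_task polling loop. A minimal sketch of that pattern, not the actual oslo.vmware implementation; session.get_task_info is a hypothetical stand-in for the real PropertyCollector round-trips:

    import time

    def wait_for_task(session, task_ref, interval=0.5):
        # Poll the vCenter task until it leaves the running states,
        # reporting progress on each pass (the "progress is N%." lines).
        while True:
            info = session.get_task_info(task_ref)  # hypothetical helper
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error_message)
            print("Task: %s progress is %s%%." % (task_ref, info.progress))
            time.sleep(interval)

The per-request log lines interleave because several of these loops run concurrently (tasks 1369822 through 1369824 above), each in its own greenthread.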
[ 536.040152] env[62923]: DEBUG nova.virt.hardware [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 536.040552] env[62923]: DEBUG nova.virt.hardware [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 536.042170] env[62923]: DEBUG nova.virt.hardware [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 536.042170] env[62923]: DEBUG nova.virt.hardware [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 536.042170] env[62923]: DEBUG nova.virt.hardware [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 536.042170] env[62923]: DEBUG nova.virt.hardware [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 536.042170] env[62923]: DEBUG nova.virt.hardware [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 536.042484] env[62923]: DEBUG nova.virt.hardware [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 536.042777] env[62923]: DEBUG nova.virt.hardware [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 536.042959] env[62923]: DEBUG nova.virt.hardware [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 536.043146] env[62923]: DEBUG nova.virt.hardware [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 536.045096] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d55586-3c8d-4d6a-a773-5c67937d2d62 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 536.057610] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d12bff-9deb-485e-bdca-ab0dd897eb07 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 536.143887] env[62923]: ERROR nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6bcf7b58-225b-48db-aaef-834cc06a36b1, please check neutron logs for more information.
[ 536.143887] env[62923]: ERROR nova.compute.manager Traceback (most recent call last):
[ 536.143887] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 536.143887] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 536.143887] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 536.143887] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 536.143887] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 536.143887] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 536.143887] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 536.143887] env[62923]: ERROR nova.compute.manager self.force_reraise()
[ 536.143887] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 536.143887] env[62923]: ERROR nova.compute.manager raise self.value
[ 536.143887] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 536.143887] env[62923]: ERROR nova.compute.manager updated_port = self._update_port(
[ 536.143887] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 536.143887] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 536.144401] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 536.144401] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 536.144401] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6bcf7b58-225b-48db-aaef-834cc06a36b1, please check neutron logs for more information.
[ 536.144401] env[62923]: ERROR nova.compute.manager
[ 536.144401] env[62923]: Traceback (most recent call last):
[ 536.144401] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 536.144401] env[62923]: listener.cb(fileno)
[ 536.144401] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 536.144401] env[62923]: result = function(*args, **kwargs)
[ 536.144401] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 536.144401] env[62923]: return func(*args, **kwargs)
[ 536.144401] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 536.144401] env[62923]: raise e
[ 536.144401] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 536.144401] env[62923]: nwinfo = self.network_api.allocate_for_instance(
[ 536.144401] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 536.144401] env[62923]: created_port_ids = self._update_ports_for_instance(
[ 536.144401] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 536.144401] env[62923]: with excutils.save_and_reraise_exception():
[ 536.144401] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 536.144401] env[62923]: self.force_reraise()
[ 536.144401] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 536.144401] env[62923]: raise self.value
[ 536.144401] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 536.144401] env[62923]: updated_port = self._update_port(
[ 536.144401] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 536.144401] env[62923]: _ensure_no_port_binding_failure(port)
[ 536.144401] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 536.144401] env[62923]: raise exception.PortBindingFailed(port_id=port['id'])
[ 536.145211] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 6bcf7b58-225b-48db-aaef-834cc06a36b1, please check neutron logs for more information.
[ 536.145211] env[62923]: Removing descriptor: 15
[ 536.145211] env[62923]: ERROR nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6bcf7b58-225b-48db-aaef-834cc06a36b1, please check neutron logs for more information.
[ 536.145211] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Traceback (most recent call last):
[ 536.145211] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 536.145211] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] yield resources
[ 536.145211] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 536.145211] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] self.driver.spawn(context, instance, image_meta,
[ 536.145211] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 536.145211] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 536.145211] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 536.145211] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] vm_ref = self.build_virtual_machine(instance,
[ 536.145779] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 536.145779] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] vif_infos = vmwarevif.get_vif_info(self._session,
[ 536.145779] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 536.145779] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] for vif in network_info:
[ 536.145779] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 536.145779] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] return self._sync_wrapper(fn, *args, **kwargs)
[ 536.145779] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 536.145779] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] self.wait()
[ 536.145779] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 536.145779] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] self[:] = self._gt.wait()
[ 536.145779] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 536.145779] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] return self._exit_event.wait()
[ 536.145779] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 536.146196] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] result = hub.switch()
[ 536.146196] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 536.146196] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] return self.greenlet.switch()
[ 536.146196] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 536.146196] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] result = function(*args, **kwargs)
[ 536.146196] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 536.146196] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] return func(*args, **kwargs)
[ 536.146196] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 536.146196] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] raise e
[ 536.146196] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 536.146196] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] nwinfo = self.network_api.allocate_for_instance(
[ 536.146196] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 536.146196] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] created_port_ids = self._update_ports_for_instance(
[ 536.146836] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 536.146836] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] with excutils.save_and_reraise_exception():
[ 536.146836] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 536.146836] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] self.force_reraise()
[ 536.146836] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 536.146836] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] raise self.value
[ 536.146836] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 536.146836] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] updated_port = self._update_port(
[ 536.146836] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 536.146836] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] _ensure_no_port_binding_failure(port)
[ 536.146836] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 536.146836] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] raise exception.PortBindingFailed(port_id=port['id'])
[ 536.147209] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] nova.exception.PortBindingFailed: Binding failed for port 6bcf7b58-225b-48db-aaef-834cc06a36b1, please check neutron logs for more information.
[ 536.147209] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10]
[ 536.147209] env[62923]: INFO nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Terminating instance
[ 536.153484] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquiring lock "refresh_cache-795b645d-3aee-4dd8-9537-2277f86c5b10" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 536.153484] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquired lock "refresh_cache-795b645d-3aee-4dd8-9537-2277f86c5b10" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 536.153484] env[62923]: DEBUG nova.network.neutron [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 536.324976] env[62923]: DEBUG nova.network.neutron [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 536.337959] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369823, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570644} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
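All three tracebacks above bottom out in _ensure_no_port_binding_failure (nova/network/neutron.py:294), with the oslo_utils save_and_reraise_exception frames re-raising the error after cleanup. The check itself is small: Neutron reports a binding it could not complete by setting the port's binding:vif_type to binding_failed, and Nova converts that into PortBindingFailed. A self-contained sketch, paraphrased from the frames rather than copied from Nova (the exception class here is simplified; the real one lives in nova.exception):

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron '
                'logs for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron signals a failed binding via the port's vif_type.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

For example, a port dict of {'id': '6bcf7b58-225b-48db-aaef-834cc06a36b1', 'binding:vif_type': 'binding_failed'} would raise exactly the message seen in these records.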
[ 536.337959] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3/6ca62d1b-9533-4b83-8e8a-7f62a34c90a3.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 536.337959] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 536.337959] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b0ec3cef-ea8c-4271-ab7e-f0d1b0db966a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 536.346782] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Waiting for the task: (returnval){
[ 536.346782] env[62923]: value = "task-1369825"
[ 536.346782] env[62923]: _type = "Task"
[ 536.346782] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 536.356471] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369825, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 536.403998] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369824, 'name': ReconfigVM_Task, 'duration_secs': 0.281937} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 536.404625] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Reconfigured VM instance instance-00000002 to attach disk [datastore1] a33da17c-bbb2-4307-b4b3-56cec5cb757e/a33da17c-bbb2-4307-b4b3-56cec5cb757e.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 536.405256] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e5d3b30-617f-4152-a9e6-9a911e97504f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 536.411787] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){
[ 536.411787] env[62923]: value = "task-1369826"
[ 536.411787] env[62923]: _type = "Task"
[ 536.411787] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 536.424736] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369826, 'name': Rename_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 536.622967] env[62923]: DEBUG nova.network.neutron [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 536.711314] env[62923]: DEBUG nova.network.neutron [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 536.859930] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369825, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.187947} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 536.860340] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 536.861951] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83dce1aa-2c8d-4e1e-a1ca-03f225129667 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 536.886977] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3/6ca62d1b-9533-4b83-8e8a-7f62a34c90a3.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 536.887562] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f7786b2-8f8d-4ee9-82c5-fa0cae5564a5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 536.913102] env[62923]: DEBUG nova.network.neutron [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 536.918784] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Waiting for the task: (returnval){
[ 536.918784] env[62923]: value = "task-1369827"
[ 536.918784] env[62923]: _type = "Task"
[ 536.918784] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 536.928296] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369826, 'name': Rename_Task, 'duration_secs': 0.154307} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 536.932831] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 536.933808] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cdcd081d-1e2e-498f-90e1-c31fdb817ac3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 536.939488] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369827, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 536.944488] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){
[ 536.944488] env[62923]: value = "task-1369828"
[ 536.944488] env[62923]: _type = "Task"
[ 536.944488] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 536.956318] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369828, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 537.127953] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Releasing lock "refresh_cache-04f788bb-19ea-456d-93eb-7398f5dbee35" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 537.128203] env[62923]: DEBUG nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 537.128398] env[62923]: DEBUG nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 537.128558] env[62923]: DEBUG nova.network.neutron [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 537.133585] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208e8f42-435e-4311-98c2-2b1acdef5a48 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.143871] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58864328-7e2a-4728-b542-a02a9f60bcab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.177503] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5906177a-3381-44c0-b847-e91e741e6318 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.186460] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc910c70-9a5e-4952-8ec9-6f1c4de8b85f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.203497] env[62923]: DEBUG nova.compute.provider_tree [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 537.282348] env[62923]: DEBUG nova.network.neutron [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 537.420239] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Releasing lock "refresh_cache-795b645d-3aee-4dd8-9537-2277f86c5b10" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 537.420665] env[62923]: DEBUG nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 537.421056] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 537.421229] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33498dfa-97e9-4e6d-9a59-ddf209da3120 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.433576] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369827, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 537.437759] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d03714-26ca-4b13-82b0-5fb5fd41eb6c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.468611] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369828, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 537.473963] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 795b645d-3aee-4dd8-9537-2277f86c5b10 could not be found.
[ 537.474242] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 537.474433] env[62923]: INFO nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 537.474761] env[62923]: DEBUG oslo.service.loopingcall [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 537.474861] env[62923]: DEBUG nova.compute.manager [-] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 537.474983] env[62923]: DEBUG nova.network.neutron [-] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 537.539176] env[62923]: DEBUG nova.network.neutron [-] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 537.711399] env[62923]: DEBUG nova.scheduler.client.report [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 537.757571] env[62923]: DEBUG nova.compute.manager [req-230a4895-81e5-43fa-a383-0186c4fd81d3 req-4b262233-1648-4aef-b508-326574b88f4a service nova] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Received event network-changed-6bcf7b58-225b-48db-aaef-834cc06a36b1 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 537.757763] env[62923]: DEBUG nova.compute.manager [req-230a4895-81e5-43fa-a383-0186c4fd81d3 req-4b262233-1648-4aef-b508-326574b88f4a service nova] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Refreshing instance network info cache due to event network-changed-6bcf7b58-225b-48db-aaef-834cc06a36b1. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 537.757964] env[62923]: DEBUG oslo_concurrency.lockutils [req-230a4895-81e5-43fa-a383-0186c4fd81d3 req-4b262233-1648-4aef-b508-326574b88f4a service nova] Acquiring lock "refresh_cache-795b645d-3aee-4dd8-9537-2277f86c5b10" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 537.758111] env[62923]: DEBUG oslo_concurrency.lockutils [req-230a4895-81e5-43fa-a383-0186c4fd81d3 req-4b262233-1648-4aef-b508-326574b88f4a service nova] Acquired lock "refresh_cache-795b645d-3aee-4dd8-9537-2277f86c5b10" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 537.758262] env[62923]: DEBUG nova.network.neutron [req-230a4895-81e5-43fa-a383-0186c4fd81d3 req-4b262233-1648-4aef-b508-326574b88f4a service nova] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Refreshing network info cache for port 6bcf7b58-225b-48db-aaef-834cc06a36b1 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 537.786631] env[62923]: DEBUG nova.network.neutron [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 537.937107] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369827, 'name': ReconfigVM_Task, 'duration_secs': 0.598317} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 537.937107] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3/6ca62d1b-9533-4b83-8e8a-7f62a34c90a3.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 537.937877] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2305ee7f-1355-4f8f-9d8e-0b69014f076e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.944044] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Waiting for the task: (returnval){
[ 537.944044] env[62923]: value = "task-1369829"
[ 537.944044] env[62923]: _type = "Task"
[ 537.944044] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 537.955706] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369829, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
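The Acquiring/Acquired/Releasing triples around "refresh_cache-<instance-uuid>" in these records are emitted by oslo.concurrency's lock helpers, which serialize concurrent network-info-cache refreshes for the same instance. Schematically, an illustrative use of the real lockutils context manager (the refresh function itself is a hypothetical stand-in):

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid):
        # lock() logs the acquire on entry and the release on exit,
        # producing the lockutils.py:310/313/331 lines seen above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            _refresh_network_info_cache(instance_uuid)  # hypothetical stand-in

Keying the lock name on the instance UUID means refreshes for different instances proceed in parallel while refreshes for the same instance queue up, which is why unrelated instances interleave freely in the log.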
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 537.969336] env[62923]: DEBUG oslo_vmware.api [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369828, 'name': PowerOnVM_Task, 'duration_secs': 0.748097} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 537.969336] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 537.969336] env[62923]: INFO nova.compute.manager [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Took 8.71 seconds to spawn the instance on the hypervisor. [ 537.969336] env[62923]: DEBUG nova.compute.manager [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 537.969336] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3bc295-917c-48a2-b23f-db0cd5ee3911 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.042461] env[62923]: DEBUG nova.network.neutron [-] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.221879] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.225018] env[62923]: DEBUG nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 538.228844] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.268s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.228844] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.228844] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62923) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 538.228844] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.793s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.230827] env[62923]: INFO nova.compute.claims [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 538.236646] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21904c50-a349-4333-ae40-2ef8e5f9695d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.263303] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f3901f-e7d9-4831-adb7-ccec9be7acda {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.288264] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2c5452-1cc6-4386-bc6d-0a15428acebd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.295205] env[62923]: INFO nova.compute.manager [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] [instance: 04f788bb-19ea-456d-93eb-7398f5dbee35] Took 1.17 seconds to deallocate network for instance. 
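The Rename_Task and PowerOnVM_Task entries above come from oslo.vmware's invoke-and-poll pattern: a vSphere call returns a task reference, and the session polls it, emitting the "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern, assuming a reachable vCenter; host, credentials, and the helper name are placeholders, not Nova's actual code path:

```python
# Hedged sketch of the oslo.vmware task pattern seen in the log above:
# invoke_api() issues the vSphere call, wait_for_task() polls it.
from oslo_vmware import api

def rename_vm(session, vm_ref, new_name):
    # vm_ref is a VM managed-object reference obtained elsewhere
    task = session.invoke_api(session.vim, 'Rename_Task', vm_ref,
                              newName=new_name)
    session.wait_for_task(task)  # logs progress / completion like above

session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
```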
[ 538.304255] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aab6601-9255-4760-82f0-261110a5f34d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 538.337019] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181490MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62923) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 538.337019] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 538.337019] env[62923]: DEBUG nova.network.neutron [req-230a4895-81e5-43fa-a383-0186c4fd81d3 req-4b262233-1648-4aef-b508-326574b88f4a service nova] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 538.456520] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369829, 'name': Rename_Task, 'duration_secs': 0.147} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 538.456520] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 538.456520] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1776a81-6c4c-43bf-b30a-2520aa2828ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 538.463517] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Waiting for the task: (returnval){
[ 538.463517] env[62923]: value = "task-1369830"
[ 538.463517] env[62923]: _type = "Task"
[ 538.463517] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 538.471018] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369830, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 538.496524] env[62923]: INFO nova.compute.manager [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Took 14.18 seconds to build instance.
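The Acquiring / acquired / "released" lines throughout this log come from oslo.concurrency's named-lock helper, which serializes callers on a lock name and reports how long they waited and held. A minimal sketch of the same pattern; the function bodies are placeholders, not the resource tracker's real logic:

```python
# Sketch of the named-lock pattern behind the "compute_resources" and
# "refresh_cache-<uuid>" entries: lockutils serializes callers on a name
# and logs waited/held durations like the ones above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim():
    pass  # placeholder for claim bookkeeping

# The same lock can also be taken as a context manager:
with lockutils.lock('refresh_cache-795b645d-3aee-4dd8-9537-2277f86c5b10'):
    pass  # placeholder for a network info cache refresh
```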
[ 538.548125] env[62923]: INFO nova.compute.manager [-] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Took 1.07 seconds to deallocate network for instance.
[ 538.549973] env[62923]: DEBUG nova.compute.claims [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 538.554023] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 538.556430] env[62923]: DEBUG nova.network.neutron [req-230a4895-81e5-43fa-a383-0186c4fd81d3 req-4b262233-1648-4aef-b508-326574b88f4a service nova] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 538.737341] env[62923]: DEBUG nova.compute.utils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 538.738752] env[62923]: DEBUG nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 538.738920] env[62923]: DEBUG nova.network.neutron [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 538.976450] env[62923]: DEBUG oslo_vmware.api [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369830, 'name': PowerOnVM_Task, 'duration_secs': 0.438805} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 538.976730] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 538.976935] env[62923]: INFO nova.compute.manager [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Took 7.50 seconds to spawn the instance on the hypervisor.
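"Allocating IP information in the background" means the Neutron allocation runs on a green thread while the rest of the build continues, and the result is collected later. A rough sketch of that shape, assuming eventlet (which Nova's tracebacks below show in use); the helper name and body are illustrative:

```python
# Rough sketch of backgrounded network allocation: spawn the neutron
# call on a green thread, keep building, then wait() for the result.
# A failure surfaces at wait(), as in the PortBindingFailed traceback
# further down in this log.
import eventlet
eventlet.monkey_patch()

def allocate_network(instance_uuid):
    return []  # placeholder for allocate_for_instance()

gt = eventlet.spawn(allocate_network,
                    'e0ce4383-cade-4d85-a8a5-2437b9203d98')
# ... build block device mappings and other resources here ...
network_info = gt.wait()  # re-raises any allocation exception
```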
[ 538.977455] env[62923]: DEBUG nova.compute.manager [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 538.978764] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a7bd0e-448c-43ff-b99c-eb09b2c6888b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 538.999089] env[62923]: DEBUG oslo_concurrency.lockutils [None req-03f8539b-ba6c-4c65-a3f9-21b98138c040 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Lock "a33da17c-bbb2-4307-b4b3-56cec5cb757e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.689s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 539.020533] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquiring lock "66b7e04a-968b-44b0-9bb2-e467810bbf6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 539.020880] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Lock "66b7e04a-968b-44b0-9bb2-e467810bbf6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 539.062900] env[62923]: DEBUG oslo_concurrency.lockutils [req-230a4895-81e5-43fa-a383-0186c4fd81d3 req-4b262233-1648-4aef-b508-326574b88f4a service nova] Releasing lock "refresh_cache-795b645d-3aee-4dd8-9537-2277f86c5b10" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 539.063201] env[62923]: DEBUG nova.compute.manager [req-230a4895-81e5-43fa-a383-0186c4fd81d3 req-4b262233-1648-4aef-b508-326574b88f4a service nova] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Received event network-vif-deleted-6bcf7b58-225b-48db-aaef-834cc06a36b1 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 539.098294] env[62923]: DEBUG nova.policy [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f97e9310cc61485880bd78073f822586', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '836bad5bd552469984890ff6a1a916d2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}}
[ 539.242356] env[62923]: DEBUG nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 539.332879] env[62923]: INFO nova.scheduler.client.report [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Deleted allocations for instance 04f788bb-19ea-456d-93eb-7398f5dbee35
[ 539.456266] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ecf1bc8-51d6-46a7-8d6f-3237e35c2d99 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 539.464267] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1567ebed-297e-4dcd-91b8-9785f62c7f78 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 539.503497] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4b5fcd-b251-4872-8176-24033d48e316 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 539.506032] env[62923]: INFO nova.compute.manager [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Took 14.32 seconds to build instance.
[ 539.512055] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdf8c84-4ccb-4526-8cfd-2f71c28c1c46 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 539.529017] env[62923]: DEBUG nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 539.530772] env[62923]: DEBUG nova.compute.provider_tree [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 539.684840] env[62923]: ERROR nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a44148b3-774a-48ee-acac-8d757051c452, please check neutron logs for more information.
[ 539.684840] env[62923]: ERROR nova.compute.manager Traceback (most recent call last):
[ 539.684840] env[62923]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 539.684840] env[62923]: ERROR nova.compute.manager     nwinfo = self.network_api.allocate_for_instance(
[ 539.684840] env[62923]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 539.684840] env[62923]: ERROR nova.compute.manager     created_port_ids = self._update_ports_for_instance(
[ 539.684840] env[62923]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 539.684840] env[62923]: ERROR nova.compute.manager     with excutils.save_and_reraise_exception():
[ 539.684840] env[62923]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 539.684840] env[62923]: ERROR nova.compute.manager     self.force_reraise()
[ 539.684840] env[62923]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 539.684840] env[62923]: ERROR nova.compute.manager     raise self.value
[ 539.684840] env[62923]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 539.684840] env[62923]: ERROR nova.compute.manager     updated_port = self._update_port(
[ 539.684840] env[62923]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 539.684840] env[62923]: ERROR nova.compute.manager     _ensure_no_port_binding_failure(port)
[ 539.685343] env[62923]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 539.685343] env[62923]: ERROR nova.compute.manager     raise exception.PortBindingFailed(port_id=port['id'])
[ 539.685343] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a44148b3-774a-48ee-acac-8d757051c452, please check neutron logs for more information.
[ 539.685343] env[62923]: ERROR nova.compute.manager
[ 539.685343] env[62923]: Traceback (most recent call last):
[ 539.685343] env[62923]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 539.685343] env[62923]:     listener.cb(fileno)
[ 539.685343] env[62923]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 539.685343] env[62923]:     result = function(*args, **kwargs)
[ 539.685343] env[62923]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 539.685343] env[62923]:     return func(*args, **kwargs)
[ 539.685343] env[62923]:   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 539.685343] env[62923]:     raise e
[ 539.685343] env[62923]:   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 539.685343] env[62923]:     nwinfo = self.network_api.allocate_for_instance(
[ 539.685343] env[62923]:   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 539.685343] env[62923]:     created_port_ids = self._update_ports_for_instance(
[ 539.685343] env[62923]:   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 539.685343] env[62923]:     with excutils.save_and_reraise_exception():
[ 539.685343] env[62923]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 539.685343] env[62923]:     self.force_reraise()
[ 539.685343] env[62923]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 539.685343] env[62923]:     raise self.value
[ 539.685343] env[62923]:   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 539.685343] env[62923]:     updated_port = self._update_port(
[ 539.685343] env[62923]:   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 539.685343] env[62923]:     _ensure_no_port_binding_failure(port)
[ 539.685343] env[62923]:   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 539.685343] env[62923]:     raise exception.PortBindingFailed(port_id=port['id'])
[ 539.686101] env[62923]: nova.exception.PortBindingFailed: Binding failed for port a44148b3-774a-48ee-acac-8d757051c452, please check neutron logs for more information.
[ 539.686101] env[62923]: Removing descriptor: 18
[ 539.686101] env[62923]: ERROR nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a44148b3-774a-48ee-acac-8d757051c452, please check neutron logs for more information.
[ 539.686101] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] Traceback (most recent call last):
[ 539.686101] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 539.686101] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     yield resources
[ 539.686101] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 539.686101] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     self.driver.spawn(context, instance, image_meta,
[ 539.686101] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 539.686101] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 539.686101] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 539.686101] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     vm_ref = self.build_virtual_machine(instance,
[ 539.686580] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 539.686580] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 539.686580] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 539.686580] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     for vif in network_info:
[ 539.686580] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 539.686580] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     return self._sync_wrapper(fn, *args, **kwargs)
[ 539.686580] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 539.686580] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     self.wait()
[ 539.686580] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 539.686580] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     self[:] = self._gt.wait()
[ 539.686580] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 539.686580] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     return self._exit_event.wait()
[ 539.686580] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 539.686951] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     result = hub.switch()
[ 539.686951] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 539.686951] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     return self.greenlet.switch()
[ 539.686951] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 539.686951] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     result = function(*args, **kwargs)
[ 539.686951] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 539.686951] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     return func(*args, **kwargs)
[ 539.686951] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 539.686951] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     raise e
[ 539.686951] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 539.686951] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     nwinfo = self.network_api.allocate_for_instance(
[ 539.686951] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 539.686951] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     created_port_ids = self._update_ports_for_instance(
[ 539.687299] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 539.687299] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     with excutils.save_and_reraise_exception():
[ 539.687299] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 539.687299] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     self.force_reraise()
[ 539.687299] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 539.687299] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     raise self.value
[ 539.687299] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 539.687299] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     updated_port = self._update_port(
[ 539.687299] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 539.687299] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     _ensure_no_port_binding_failure(port)
[ 539.687299] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 539.687299] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]     raise exception.PortBindingFailed(port_id=port['id'])
[ 539.687609] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] nova.exception.PortBindingFailed: Binding failed for port a44148b3-774a-48ee-acac-8d757051c452, please check neutron logs for more information.
[ 539.687609] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b]
[ 539.687609] env[62923]: INFO nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Terminating instance
[ 539.689923] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Acquiring lock "refresh_cache-f81af398-7382-4433-9a24-07d16fd1223b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 539.689923] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Acquired lock "refresh_cache-f81af398-7382-4433-9a24-07d16fd1223b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 539.690081] env[62923]: DEBUG nova.network.neutron [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 539.845696] env[62923]: DEBUG oslo_concurrency.lockutils [None req-739ac146-a47d-4aba-9196-f950b35de382 tempest-ImagesOneServerTestJSON-993215476 tempest-ImagesOneServerTestJSON-993215476-project-member] Lock "04f788bb-19ea-456d-93eb-7398f5dbee35" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.077s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 539.858815] env[62923]: DEBUG nova.compute.manager [req-865f30b6-27e8-45ea-833b-843d0fa55f64 req-1dcb13be-a522-406d-9d7a-dd958c628f23 service nova] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Received event network-changed-a44148b3-774a-48ee-acac-8d757051c452 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 539.858999] env[62923]: DEBUG nova.compute.manager [req-865f30b6-27e8-45ea-833b-843d0fa55f64 req-1dcb13be-a522-406d-9d7a-dd958c628f23 service nova] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Refreshing instance network info cache due to event network-changed-a44148b3-774a-48ee-acac-8d757051c452. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
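The traceback above shows oslo.utils' save_and_reraise_exception idiom: cleanup code runs inside the context manager, then the original exception (here PortBindingFailed) is re-raised on exit via force_reraise(). A minimal, runnable sketch of the idiom with placeholder helpers:

```python
# Minimal sketch of the save_and_reraise_exception idiom seen in the
# traceback above; update_port() and roll_back() are placeholders.
from oslo_utils import excutils

def update_port():
    raise RuntimeError('binding failed')  # stand-in for PortBindingFailed

def roll_back():
    print('cleaning up port state')

try:
    try:
        update_port()
    except Exception:
        with excutils.save_and_reraise_exception():
            roll_back()  # runs first; the original exception re-raises after
except RuntimeError as exc:
    print('caller still sees:', exc)
```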
[ 539.859200] env[62923]: DEBUG oslo_concurrency.lockutils [req-865f30b6-27e8-45ea-833b-843d0fa55f64 req-1dcb13be-a522-406d-9d7a-dd958c628f23 service nova] Acquiring lock "refresh_cache-f81af398-7382-4433-9a24-07d16fd1223b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 540.008101] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bcc5929b-d5fc-455c-8b5a-c3440a0c0ade tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Lock "6ca62d1b-9533-4b83-8e8a-7f62a34c90a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.839s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 540.035786] env[62923]: DEBUG nova.scheduler.client.report [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 540.078114] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 540.249784] env[62923]: DEBUG nova.network.neutron [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 540.258149] env[62923]: DEBUG nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 540.295215] env[62923]: DEBUG nova.virt.hardware [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 540.296254] env[62923]: DEBUG nova.virt.hardware [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 540.296254] env[62923]: DEBUG nova.virt.hardware [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 540.296416] env[62923]: DEBUG nova.virt.hardware [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 540.296665] env[62923]: DEBUG nova.virt.hardware [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 540.296863] env[62923]: DEBUG nova.virt.hardware [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 540.297120] env[62923]: DEBUG nova.virt.hardware [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 540.297398] env[62923]: DEBUG nova.virt.hardware [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 540.297825] env[62923]: DEBUG nova.virt.hardware [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 540.298019] env[62923]: DEBUG nova.virt.hardware [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 540.298241] env[62923]: DEBUG nova.virt.hardware [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 540.299197] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9b42f3-e5cd-4784-ba4a-61c4502d9a61 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 540.308883] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d34952-8a19-47f5-b66c-11372105fcf2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 540.551491] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.322s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 540.552213] env[62923]: DEBUG nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
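The virt.hardware walkthrough above enumerates CPU topologies for vcpus=1 with effectively unlimited sockets/cores/threads and keeps the single valid one. A toy re-derivation of that enumeration, not Nova's implementation:

```python
# Toy enumeration matching the "Build topologies for 1 vcpu(s) 1:1:1"
# lines above: yield every (sockets, cores, threads) tuple whose product
# equals the vCPU count, capped by the (huge) limits from the log.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)] -> "Got 1 possible topologies"
```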
[ 540.555752] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.811s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 540.560321] env[62923]: INFO nova.compute.claims [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 540.686858] env[62923]: DEBUG nova.network.neutron [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 540.909869] env[62923]: DEBUG nova.network.neutron [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Successfully created port: 51b8166c-7348-41f6-809a-c827f13e7dbe {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 541.065480] env[62923]: DEBUG nova.compute.utils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 541.069439] env[62923]: DEBUG nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 541.069751] env[62923]: DEBUG nova.network.neutron [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 541.173049] env[62923]: DEBUG nova.policy [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43da24d2342947f1824eb19a33f2f791', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd355586106ec4d27ac6ee442cfd04d73', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}}
[ 541.190442] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Releasing lock "refresh_cache-f81af398-7382-4433-9a24-07d16fd1223b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 541.192457] env[62923]: DEBUG nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 541.192457] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 541.192457] env[62923]: DEBUG oslo_concurrency.lockutils [req-865f30b6-27e8-45ea-833b-843d0fa55f64 req-1dcb13be-a522-406d-9d7a-dd958c628f23 service nova] Acquired lock "refresh_cache-f81af398-7382-4433-9a24-07d16fd1223b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 541.192457] env[62923]: DEBUG nova.network.neutron [req-865f30b6-27e8-45ea-833b-843d0fa55f64 req-1dcb13be-a522-406d-9d7a-dd958c628f23 service nova] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Refreshing network info cache for port a44148b3-774a-48ee-acac-8d757051c452 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 541.192911] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a04039c-8b89-4d82-be75-0f8b1c733954 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 541.204715] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598a60c2-34c7-4d5e-99d0-4336bfaf0414 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 541.233904] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f81af398-7382-4433-9a24-07d16fd1223b could not be found.
[ 541.233904] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 541.234789] env[62923]: INFO nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 541.234867] env[62923]: DEBUG oslo.service.loopingcall [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
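The oslo.service loopingcall entry above wraps network deallocation in a retrying loop and blocks until the wrapped function returns. As a sketch of the same machinery, a FixedIntervalLoopingCall that stops itself on success; Nova's actual retry wrapper differs in detail, and the deallocate() body is a placeholder:

```python
# Sketch of the oslo.service loopingcall pattern referenced above: run a
# function periodically and block until it finishes; raising
# LoopingCallDone ends the loop.
from oslo_service import loopingcall

def deallocate():
    print('deallocating network')
    raise loopingcall.LoopingCallDone()  # signal success, stop looping

timer = loopingcall.FixedIntervalLoopingCall(deallocate)
timer.start(interval=2.0).wait()  # blocks, like "Waiting for function ..."
```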
[ 541.235398] env[62923]: DEBUG nova.compute.manager [-] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 541.235545] env[62923]: DEBUG nova.network.neutron [-] [instance: f81af398-7382-4433-9a24-07d16fd1223b] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 541.288816] env[62923]: DEBUG nova.network.neutron [-] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 541.427195] env[62923]: INFO nova.compute.manager [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Rebuilding instance
[ 541.482116] env[62923]: DEBUG nova.compute.manager [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 541.484009] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42503d73-23bc-495a-ace2-de72c1c08a9d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 541.573495] env[62923]: DEBUG nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 541.605607] env[62923]: DEBUG nova.compute.manager [None req-099f6ed1-1e43-4f53-b301-cd4ee5def466 tempest-ServerDiagnosticsV248Test-472437528 tempest-ServerDiagnosticsV248Test-472437528-project-admin] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 541.608479] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc5d312-d787-4f8d-a509-08f001cb8868 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 541.627587] env[62923]: INFO nova.compute.manager [None req-099f6ed1-1e43-4f53-b301-cd4ee5def466 tempest-ServerDiagnosticsV248Test-472437528 tempest-ServerDiagnosticsV248Test-472437528-project-admin] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Retrieving diagnostics
[ 541.629078] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6234ecee-f2ee-4959-8df6-4aaf53e221a9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 541.731711] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Acquiring lock "e08d9f27-d2b3-4532-862f-b68e830e8d17" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 541.732061] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Lock "e08d9f27-d2b3-4532-862f-b68e830e8d17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 541.793580] env[62923]: DEBUG nova.network.neutron [-] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 541.810726] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ed44bb-63e4-40a2-8d7c-8f31d59139d9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 541.821483] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51aa9682-51fe-4739-8613-7aa18a59dd86 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 541.856062] env[62923]: DEBUG nova.network.neutron [req-865f30b6-27e8-45ea-833b-843d0fa55f64 req-1dcb13be-a522-406d-9d7a-dd958c628f23 service nova] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 541.858603] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6cdb48-3afa-4019-be23-72ad45b8bb9f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 541.872848] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb02d1c-86a4-4047-be34-a312d23f3a84 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 541.888025] env[62923]: DEBUG nova.compute.provider_tree [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 542.002288] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 542.002288] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a4e7d9b-d661-43cf-b217-7c86f21b01d5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.011820] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){
[ 542.011820] env[62923]: value = "task-1369831"
[ 542.011820] env[62923]: _type = "Task"
[ 542.011820] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 542.021378] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369831, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 542.234893] env[62923]: DEBUG nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 542.297495] env[62923]: INFO nova.compute.manager [-] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Took 1.06 seconds to deallocate network for instance.
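The "Inventory has not changed for provider ... based on inventory data" entries in this section carry the node's placement inventory. Laid out as Python data for readability (values copied from the log), with the standard placement capacity rule applied:

```python
# Inventory payload from the log, restructured for readability; the
# capacity placement schedules against is (total - reserved) * ratio.
INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in INVENTORY.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```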
[ 542.298579] env[62923]: DEBUG nova.compute.claims [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 542.298973] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 542.366953] env[62923]: DEBUG nova.network.neutron [req-865f30b6-27e8-45ea-833b-843d0fa55f64 req-1dcb13be-a522-406d-9d7a-dd958c628f23 service nova] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 542.391821] env[62923]: DEBUG nova.scheduler.client.report [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 542.526484] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369831, 'name': PowerOffVM_Task, 'duration_secs': 0.129531} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 542.526484] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 542.526484] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 542.526484] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66574ef-dabd-4110-b955-e11162c39fc3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.535550] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 542.535891] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1edefb40-d2db-4d4f-b786-3cf9a1fcba8d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.561844] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 542.562126] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 542.562332] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Deleting the datastore file [datastore1] a33da17c-bbb2-4307-b4b3-56cec5cb757e {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 542.562639] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c6489523-0f5a-49b4-8b0c-7c8be89ed435 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 542.572725] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){
[ 542.572725] env[62923]: value = "task-1369833"
[ 542.572725] env[62923]: _type = "Task"
[ 542.572725] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.586409] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369833, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.592622] env[62923]: DEBUG nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 542.638377] env[62923]: DEBUG nova.virt.hardware [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 542.638600] env[62923]: DEBUG nova.virt.hardware [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 542.638743] env[62923]: DEBUG nova.virt.hardware [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 542.638922] env[62923]: DEBUG nova.virt.hardware [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 542.639062] env[62923]: DEBUG nova.virt.hardware [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 542.639224] env[62923]: DEBUG nova.virt.hardware [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 542.639414] env[62923]: DEBUG nova.virt.hardware [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 542.639559] env[62923]: DEBUG nova.virt.hardware [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 542.639710] env[62923]: DEBUG nova.virt.hardware [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 542.639858] env[62923]: DEBUG nova.virt.hardware [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 542.640030] env[62923]: DEBUG nova.virt.hardware [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 542.640897] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e0879d-ec2b-403d-be36-a3ae9619f56b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.653132] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f868b168-c319-4d59-ab4e-5eab17d5eaf9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.766074] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.869825] env[62923]: DEBUG oslo_concurrency.lockutils [req-865f30b6-27e8-45ea-833b-843d0fa55f64 req-1dcb13be-a522-406d-9d7a-dd958c628f23 service nova] Releasing lock "refresh_cache-f81af398-7382-4433-9a24-07d16fd1223b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.902445] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=62923) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.902445] env[62923]: DEBUG nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 542.905356] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.099s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.909581] env[62923]: INFO nova.compute.claims [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 543.037282] env[62923]: DEBUG nova.network.neutron [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Successfully created port: 8137e2da-4915-439e-9eec-7b3b901c8237 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 543.088218] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369833, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100841} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.088493] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 543.088579] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 543.089410] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 543.414849] env[62923]: DEBUG nova.compute.utils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 543.418066] env[62923]: DEBUG nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 543.418239] env[62923]: DEBUG nova.network.neutron [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 543.532527] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Acquiring lock "33910d6d-0911-4e2a-82af-1b705cedd3fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.532527] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Lock "33910d6d-0911-4e2a-82af-1b705cedd3fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.724514] env[62923]: DEBUG nova.policy [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7d8405027a74caf8e069bb8a05dc70c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'5b0d04c0a52b428ba859794cd3720416', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 543.920232] env[62923]: DEBUG nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 544.036481] env[62923]: DEBUG nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 544.139241] env[62923]: DEBUG nova.virt.hardware [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 544.139241] env[62923]: DEBUG nova.virt.hardware [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 544.139241] env[62923]: DEBUG nova.virt.hardware [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 544.139241] env[62923]: DEBUG nova.virt.hardware [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 544.139406] env[62923]: DEBUG nova.virt.hardware [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 544.139406] env[62923]: DEBUG nova.virt.hardware [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 544.139406] env[62923]: DEBUG nova.virt.hardware [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 544.139406] env[62923]: DEBUG nova.virt.hardware [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 544.139517] env[62923]: DEBUG nova.virt.hardware [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 544.140323] env[62923]: DEBUG nova.virt.hardware [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 544.140323] env[62923]: DEBUG nova.virt.hardware [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 544.142052] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc6e1a9-159b-4949-b72a-bc072c0f1a74 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.155092] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb111da-42bf-41be-a01f-8124b0df6073 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.156244] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7f73e0-d705-46b1-9d55-98775b26169e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.173406] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb593e7-99eb-4691-89ed-a242c52b5fd8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.177341] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Instance VIF info [] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 544.184441] env[62923]: DEBUG oslo.service.loopingcall [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 544.184566] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 544.185140] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21d81eea-c81a-44dd-a559-ac7e7b98dc3f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.230018] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802bbb82-bbff-4d30-99b5-57b60f3d9be4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.230018] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 544.230018] env[62923]: value = "task-1369834" [ 544.230018] env[62923]: _type = "Task" [ 544.230018] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.237581] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fbebdea-f023-4af0-869f-b0a53f1e428d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.246894] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369834, 'name': CreateVM_Task} progress is 15%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.256387] env[62923]: DEBUG nova.compute.provider_tree [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 544.587756] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.663058] env[62923]: DEBUG nova.compute.manager [req-ca4db2b1-6d38-4faf-9209-9d1690b5ecd2 req-1f3bf6d1-b1ed-4cf4-9fcd-e1adf8ac773f service nova] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Received event network-vif-deleted-a44148b3-774a-48ee-acac-8d757051c452 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 544.742417] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369834, 'name': CreateVM_Task, 'duration_secs': 0.348842} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 544.742417] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 544.743384] env[62923]: DEBUG oslo_vmware.service [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba13e93-e95a-4475-8a5c-96a8548f4d02 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.754185] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.754352] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.754772] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 544.755058] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd51209b-bc91-49bd-ae22-a95364c674e4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.760372] env[62923]: DEBUG nova.scheduler.client.report [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 544.764606] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 544.764606] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c552de-36e8-6f31-46a8-fd515aeddada" [ 544.764606] env[62923]: _type = "Task" [ 544.764606] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.776865] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.776950] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 544.777177] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.778854] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.778854] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 544.778854] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-edb23cb7-e8cf-433d-8af9-bff41d36ea73 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.795421] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 544.795684] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 544.796521] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5dd772-5d36-44be-9ccc-4ee574495970 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.804815] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5aeb20e-d286-4a95-ae69-cfd3fd1279f5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.811475] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 544.811475] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c9560e-8c54-e204-923b-8b29129c968b" [ 544.811475] env[62923]: _type = "Task" [ 544.811475] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.819161] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c9560e-8c54-e204-923b-8b29129c968b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.938332] env[62923]: DEBUG nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 544.977114] env[62923]: DEBUG nova.virt.hardware [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 544.977114] env[62923]: DEBUG nova.virt.hardware [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 544.977376] env[62923]: DEBUG nova.virt.hardware [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 544.977977] env[62923]: DEBUG nova.virt.hardware [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 544.977977] env[62923]: DEBUG nova.virt.hardware [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 544.978592] env[62923]: DEBUG nova.virt.hardware [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 544.978592] env[62923]: DEBUG nova.virt.hardware [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 544.978592] env[62923]: DEBUG nova.virt.hardware [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 544.978680] env[62923]: DEBUG nova.virt.hardware [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 
tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 544.978786] env[62923]: DEBUG nova.virt.hardware [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 544.978949] env[62923]: DEBUG nova.virt.hardware [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 544.980251] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4cf1ab-fd3a-48a2-ae09-20711d138b2d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.988822] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbfb9ce8-cf3c-45ce-943d-2715edd86496 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.217882] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Acquiring lock "25758cb8-6635-4284-bc94-a95389af3c8a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.218107] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Lock "25758cb8-6635-4284-bc94-a95389af3c8a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.266637] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.268018] env[62923]: DEBUG nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 545.273927] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 6.938s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.329141] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Preparing fetch location {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 545.329141] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Creating directory with path [datastore2] vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83/cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 545.329141] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3338bf51-0da4-4333-aacb-a1c265b2114d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.352441] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Created directory with path [datastore2] vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83/cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 545.352543] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Fetch image to [datastore2] vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 545.352705] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Downloading image file data cd84cf13-77b9-4bc1-bb15-31bece605a8e to [datastore2] vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk on the data store datastore2 {{(pid=62923) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 545.353664] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ef755f-73e8-4278-8f88-19ee82d9fe45 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.362501] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f944bfe-eca3-4488-855b-f3c923d064ae {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.373703] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c265b2c2-8cc2-44f7-80d8-fc716f22fb4e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.419999] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b725470-dfd0-4f4d-9c6b-4d2ca8559870 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.428294] env[62923]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8a5b0015-a9cb-4856-9851-e00eb0dc6737 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.462387] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Downloading image file data cd84cf13-77b9-4bc1-bb15-31bece605a8e to the data store datastore2 {{(pid=62923) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 545.533636] env[62923]: DEBUG oslo_vmware.rw_handles [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62923) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 545.720963] env[62923]: DEBUG nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 545.781019] env[62923]: DEBUG nova.network.neutron [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Successfully created port: 63c15049-830e-4a51-bf4e-e3cfee34896a {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 545.783970] env[62923]: DEBUG nova.compute.utils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 545.793523] env[62923]: DEBUG nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 545.793723] env[62923]: DEBUG nova.network.neutron [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 545.983532] env[62923]: DEBUG nova.policy [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f97e9310cc61485880bd78073f822586', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '836bad5bd552469984890ff6a1a916d2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 546.199963] env[62923]: DEBUG oslo_vmware.rw_handles [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Completed reading data from the image iterator. {{(pid=62923) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 546.200260] env[62923]: DEBUG oslo_vmware.rw_handles [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62923) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 546.253226] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Acquiring lock "6f5cc707-70e8-48fb-8d9d-904f3e0130af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.253226] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Lock "6f5cc707-70e8-48fb-8d9d-904f3e0130af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.260494] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.290771] env[62923]: DEBUG nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 546.318923] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance a33da17c-bbb2-4307-b4b3-56cec5cb757e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 546.319095] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 546.319227] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 795b645d-3aee-4dd8-9537-2277f86c5b10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 546.319340] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance f81af398-7382-4433-9a24-07d16fd1223b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 546.319552] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance e0ce4383-cade-4d85-a8a5-2437b9203d98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 546.319552] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 3cb4806d-dffa-4c41-9730-f29d2aad059a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 546.319658] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 04d96512-dc09-42ff-96d0-961f7359318c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 546.319762] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 4fe36f05-d730-4fb1-ab05-0425be619dfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 546.346882] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Downloaded image file data cd84cf13-77b9-4bc1-bb15-31bece605a8e to vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk on the data store datastore2 {{(pid=62923) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 546.349622] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Caching image {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 546.349753] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Copying Virtual Disk [datastore2] vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk to [datastore2] vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 546.350076] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87f37fe4-f0da-4916-bd8b-970e102900f2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.359267] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 
tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 546.359267] env[62923]: value = "task-1369835" [ 546.359267] env[62923]: _type = "Task" [ 546.359267] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.371583] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369835, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.826068] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 66b7e04a-968b-44b0-9bb2-e467810bbf6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 546.873872] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369835, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.239737] env[62923]: DEBUG nova.network.neutron [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Successfully created port: 2bbe3814-1872-4090-baef-b83249364387 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 547.305719] env[62923]: DEBUG nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 547.331663] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance e08d9f27-d2b3-4532-862f-b68e830e8d17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 547.340755] env[62923]: DEBUG nova.virt.hardware [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 547.343205] env[62923]: DEBUG nova.virt.hardware [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 547.343756] env[62923]: DEBUG nova.virt.hardware [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 547.343756] env[62923]: DEBUG nova.virt.hardware [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 547.343756] env[62923]: DEBUG nova.virt.hardware [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 547.343864] env[62923]: DEBUG nova.virt.hardware [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 547.344073] env[62923]: DEBUG nova.virt.hardware [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 547.344276] env[62923]: DEBUG nova.virt.hardware [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 547.344401] env[62923]: DEBUG nova.virt.hardware [None 
req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 547.344562] env[62923]: DEBUG nova.virt.hardware [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 547.344727] env[62923]: DEBUG nova.virt.hardware [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 547.345592] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7edb26-15c0-4667-8577-cfaed695d475 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.366323] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980e10b9-187e-48f1-98cd-5fb666c94855 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.380139] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369835, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.708124} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.388291] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Copied Virtual Disk [datastore2] vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk to [datastore2] vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 547.388569] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Deleting the datastore file [datastore2] vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83/cd84cf13-77b9-4bc1-bb15-31bece605a8e/tmp-sparse.vmdk {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 547.389314] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe2d6ff4-762a-4637-b3f0-ec6440c0c85f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.395668] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 547.395668] env[62923]: value = "task-1369836" [ 547.395668] env[62923]: _type = "Task" [ 547.395668] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.403835] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369836, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.739110] env[62923]: ERROR nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 51b8166c-7348-41f6-809a-c827f13e7dbe, please check neutron logs for more information. [ 547.739110] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 547.739110] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 547.739110] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 547.739110] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 547.739110] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 547.739110] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 547.739110] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 547.739110] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.739110] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 547.739110] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.739110] env[62923]: ERROR nova.compute.manager raise self.value [ 547.739110] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 547.739110] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 547.739110] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.739110] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 547.739556] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.739556] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 547.739556] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 51b8166c-7348-41f6-809a-c827f13e7dbe, please check neutron logs for more information. 
[ 547.739556] env[62923]: ERROR nova.compute.manager [ 547.739556] env[62923]: Traceback (most recent call last): [ 547.739556] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 547.739556] env[62923]: listener.cb(fileno) [ 547.739556] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.739556] env[62923]: result = function(*args, **kwargs) [ 547.739556] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 547.739556] env[62923]: return func(*args, **kwargs) [ 547.739556] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 547.739556] env[62923]: raise e [ 547.739556] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 547.739556] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 547.739556] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 547.739556] env[62923]: created_port_ids = self._update_ports_for_instance( [ 547.739556] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 547.739556] env[62923]: with excutils.save_and_reraise_exception(): [ 547.739556] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.739556] env[62923]: self.force_reraise() [ 547.739556] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.739556] env[62923]: raise self.value [ 547.739556] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 547.739556] env[62923]: updated_port = self._update_port( [ 547.739556] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.739556] env[62923]: _ensure_no_port_binding_failure(port) [ 547.739556] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.739556] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 547.740246] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 51b8166c-7348-41f6-809a-c827f13e7dbe, please check neutron logs for more information. [ 547.740246] env[62923]: Removing descriptor: 15 [ 547.740246] env[62923]: ERROR nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 51b8166c-7348-41f6-809a-c827f13e7dbe, please check neutron logs for more information. 
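Editor's note: both tracebacks above terminate in nova/network/neutron.py:294. As a reading aid, here is a minimal, self-contained sketch of the check that converts a failed Neutron binding into the PortBindingFailed seen here. It is illustrative, not the Nova source, though it mirrors the _ensure_no_port_binding_failure frame cited in the traceback; 'binding_failed' is the binding:vif_type value Neutron reports when no mechanism driver could bind the port.

    # Illustrative sketch of the check at nova/network/neutron.py:294
    # (per the traceback frames above); not the Nova source itself.
    class PortBindingFailed(Exception):
        """Mirrors the message format seen in the log lines above."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port it could not bind with
        # binding:vif_type = 'binding_failed'; Nova aborts the spawn on it.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port['id'])

In this log the check fires first for port 51b8166c-7348-41f6-809a-c827f13e7dbe (instance e0ce4383-...) and shortly after for port 8137e2da-4915-439e-9eec-7b3b901c8237 (instance 3cb4806d-...), which is why both instances are terminated before any VM is built on the vCenter side.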
[ 547.740246] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Traceback (most recent call last): [ 547.740246] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 547.740246] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] yield resources [ 547.740246] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 547.740246] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] self.driver.spawn(context, instance, image_meta, [ 547.740246] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 547.740246] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] self._vmops.spawn(context, instance, image_meta, injected_files, [ 547.740246] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 547.740246] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] vm_ref = self.build_virtual_machine(instance, [ 547.741379] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 547.741379] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] vif_infos = vmwarevif.get_vif_info(self._session, [ 547.741379] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 547.741379] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] for vif in network_info: [ 547.741379] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 547.741379] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] return self._sync_wrapper(fn, *args, **kwargs) [ 547.741379] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 547.741379] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] self.wait() [ 547.741379] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 547.741379] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] self[:] = self._gt.wait() [ 547.741379] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 547.741379] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] return self._exit_event.wait() [ 547.741379] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 547.741690] env[62923]: ERROR 
nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] result = hub.switch() [ 547.741690] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 547.741690] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] return self.greenlet.switch() [ 547.741690] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.741690] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] result = function(*args, **kwargs) [ 547.741690] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 547.741690] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] return func(*args, **kwargs) [ 547.741690] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 547.741690] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] raise e [ 547.741690] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 547.741690] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] nwinfo = self.network_api.allocate_for_instance( [ 547.741690] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 547.741690] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] created_port_ids = self._update_ports_for_instance( [ 547.742013] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 547.742013] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] with excutils.save_and_reraise_exception(): [ 547.742013] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.742013] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] self.force_reraise() [ 547.742013] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.742013] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] raise self.value [ 547.742013] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 547.742013] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] updated_port = self._update_port( [ 547.742013] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.742013] 
env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] _ensure_no_port_binding_failure(port) [ 547.742013] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.742013] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] raise exception.PortBindingFailed(port_id=port['id']) [ 547.742345] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] nova.exception.PortBindingFailed: Binding failed for port 51b8166c-7348-41f6-809a-c827f13e7dbe, please check neutron logs for more information. [ 547.742345] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] [ 547.742345] env[62923]: INFO nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Terminating instance [ 547.744064] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquiring lock "refresh_cache-e0ce4383-cade-4d85-a8a5-2437b9203d98" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.744626] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquired lock "refresh_cache-e0ce4383-cade-4d85-a8a5-2437b9203d98" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.744626] env[62923]: DEBUG nova.network.neutron [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 547.838068] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 33910d6d-0911-4e2a-82af-1b705cedd3fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 547.906542] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369836, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022318} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.906782] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 547.907035] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Moving file from [datastore2] vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83/cd84cf13-77b9-4bc1-bb15-31bece605a8e to [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e. {{(pid=62923) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 547.907257] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-2a1fd07c-f186-49b0-87eb-0632a9f6351b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.916505] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 547.916505] env[62923]: value = "task-1369837" [ 547.916505] env[62923]: _type = "Task" [ 547.916505] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.930303] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369837, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.327014] env[62923]: DEBUG nova.network.neutron [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 548.341904] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 25758cb8-6635-4284-bc94-a95389af3c8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 548.430859] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369837, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.02633} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.435193] env[62923]: ERROR nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8137e2da-4915-439e-9eec-7b3b901c8237, please check neutron logs for more information. [ 548.435193] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 548.435193] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.435193] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 548.435193] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 548.435193] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 548.435193] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 548.435193] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 548.435193] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.435193] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 548.435193] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.435193] env[62923]: ERROR nova.compute.manager raise self.value [ 548.435193] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 548.435193] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 548.435193] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.435193] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 548.435593] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.435593] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 548.435593] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8137e2da-4915-439e-9eec-7b3b901c8237, please check neutron logs for more information. 
[ 548.435593] env[62923]: ERROR nova.compute.manager [ 548.435593] env[62923]: Traceback (most recent call last): [ 548.435593] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 548.435593] env[62923]: listener.cb(fileno) [ 548.435593] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.435593] env[62923]: result = function(*args, **kwargs) [ 548.435593] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 548.435593] env[62923]: return func(*args, **kwargs) [ 548.435593] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 548.435593] env[62923]: raise e [ 548.435593] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.435593] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 548.435593] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 548.435593] env[62923]: created_port_ids = self._update_ports_for_instance( [ 548.435593] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 548.435593] env[62923]: with excutils.save_and_reraise_exception(): [ 548.435593] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.435593] env[62923]: self.force_reraise() [ 548.435593] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.435593] env[62923]: raise self.value [ 548.435593] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 548.435593] env[62923]: updated_port = self._update_port( [ 548.435593] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.435593] env[62923]: _ensure_no_port_binding_failure(port) [ 548.435593] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.435593] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 548.436306] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 8137e2da-4915-439e-9eec-7b3b901c8237, please check neutron logs for more information. 
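Editor's note: interleaved with the port-binding failures, the ds_util/vm_util DEBUG lines (tasks 1369835 through 1369839, continued below) trace the vmwareapi driver populating its datastore image cache for image cd84cf13-77b9-4bc1-bb15-31bece605a8e: fetch into a vmware_temp staging directory, copy the sparse disk to a flat one, delete the sparse temp file, move the result into devstack-image-cache_base, then copy from the cache into the instance's own folder. A rough sketch of that sequence, with hypothetical helper callables standing in for the datastore task invocations (not Nova's actual API):

    # Rough sketch of the caching sequence in the surrounding DEBUG lines.
    # copy_disk / delete_file / move_file are hypothetical stand-ins for the
    # CopyVirtualDisk_Task / DeleteDatastoreFile_Task / MoveDatastoreFile_Task
    # invocations visible in the log.
    def cache_and_use_image(image_id, tmp_dir, instance_uuid,
                            copy_disk, delete_file, move_file):
        staging = "[datastore2] vmware_temp/%s/%s" % (tmp_dir, image_id)
        cache = "[datastore2] devstack-image-cache_base/%s" % image_id
        # task-1369835: sparse -> flat copy inside the staging directory
        copy_disk(staging + "/tmp-sparse.vmdk",
                  staging + "/%s.vmdk" % image_id)
        # task-1369836: drop the sparse temp file
        delete_file(staging + "/tmp-sparse.vmdk")
        # task-1369837: promote the staging directory into the shared cache
        move_file(staging, cache)
        # task-1369838: clean up what is left under vmware_temp
        delete_file("[datastore2] vmware_temp/%s" % tmp_dir)
        # task-1369839: per-instance copy out of the cache
        copy_disk(cache + "/%s.vmdk" % image_id,
                  "[datastore2] %s/%s.vmdk" % (instance_uuid, instance_uuid))

Because the cache lives under devstack-image-cache_base on the datastore, later spawns of the same image (several are queued in this run) skip the Glance download and start at the final per-instance copy.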
[ 548.436306] env[62923]: Removing descriptor: 18 [ 548.436306] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] File moved {{(pid=62923) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 548.436306] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Cleaning up location [datastore2] vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83 {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 548.436306] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Deleting the datastore file [datastore2] vmware_temp/8032f21f-1ab2-4cf7-84f3-81596be0ba83 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 548.436306] env[62923]: ERROR nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8137e2da-4915-439e-9eec-7b3b901c8237, please check neutron logs for more information. [ 548.436306] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Traceback (most recent call last): [ 548.436306] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 548.436554] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] yield resources [ 548.436554] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 548.436554] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] self.driver.spawn(context, instance, image_meta, [ 548.436554] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 548.436554] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 548.436554] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 548.436554] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] vm_ref = self.build_virtual_machine(instance, [ 548.436554] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 548.436554] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] vif_infos = vmwarevif.get_vif_info(self._session, [ 548.436554] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 548.436554] env[62923]: ERROR 
nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] for vif in network_info: [ 548.436554] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 548.436554] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] return self._sync_wrapper(fn, *args, **kwargs) [ 548.437014] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 548.437014] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] self.wait() [ 548.437014] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 548.437014] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] self[:] = self._gt.wait() [ 548.437014] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 548.437014] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] return self._exit_event.wait() [ 548.437014] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 548.437014] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] result = hub.switch() [ 548.437014] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 548.437014] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] return self.greenlet.switch() [ 548.437014] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.437014] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] result = function(*args, **kwargs) [ 548.437014] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 548.437403] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] return func(*args, **kwargs) [ 548.437403] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 548.437403] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] raise e [ 548.437403] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.437403] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] nwinfo = self.network_api.allocate_for_instance( [ 548.437403] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 548.437403] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] 
created_port_ids = self._update_ports_for_instance( [ 548.437403] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 548.437403] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] with excutils.save_and_reraise_exception(): [ 548.437403] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.437403] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] self.force_reraise() [ 548.437403] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.437403] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] raise self.value [ 548.437786] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 548.437786] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] updated_port = self._update_port( [ 548.437786] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.437786] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] _ensure_no_port_binding_failure(port) [ 548.437786] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.437786] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] raise exception.PortBindingFailed(port_id=port['id']) [ 548.437786] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] nova.exception.PortBindingFailed: Binding failed for port 8137e2da-4915-439e-9eec-7b3b901c8237, please check neutron logs for more information. 
[ 548.437786] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] [ 548.437786] env[62923]: INFO nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Terminating instance [ 548.437786] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-548a9979-8d91-45e2-84e1-c7246f963d31 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.440218] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Acquiring lock "refresh_cache-3cb4806d-dffa-4c41-9730-f29d2aad059a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.440777] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Acquired lock "refresh_cache-3cb4806d-dffa-4c41-9730-f29d2aad059a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.440977] env[62923]: DEBUG nova.network.neutron [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 548.451764] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 548.451764] env[62923]: value = "task-1369838" [ 548.451764] env[62923]: _type = "Task" [ 548.451764] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.462999] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369838, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.558093] env[62923]: DEBUG nova.compute.manager [req-26df7383-1916-4766-9727-5645e6821b59 req-2246bac3-3137-401d-a536-119fa9ffb8a9 service nova] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Received event network-changed-51b8166c-7348-41f6-809a-c827f13e7dbe {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 548.558093] env[62923]: DEBUG nova.compute.manager [req-26df7383-1916-4766-9727-5645e6821b59 req-2246bac3-3137-401d-a536-119fa9ffb8a9 service nova] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Refreshing instance network info cache due to event network-changed-51b8166c-7348-41f6-809a-c827f13e7dbe. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 548.558093] env[62923]: DEBUG oslo_concurrency.lockutils [req-26df7383-1916-4766-9727-5645e6821b59 req-2246bac3-3137-401d-a536-119fa9ffb8a9 service nova] Acquiring lock "refresh_cache-e0ce4383-cade-4d85-a8a5-2437b9203d98" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.836662] env[62923]: DEBUG nova.network.neutron [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.848012] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 6f5cc707-70e8-48fb-8d9d-904f3e0130af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 548.848504] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 548.848899] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 548.968916] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369838, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024846} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.969593] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 548.970730] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-348d2ca2-f4e9-410c-818a-97c881a6c057 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.979082] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 548.979082] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a1a342-1191-68e8-dc35-b0f54981e65b" [ 548.979082] env[62923]: _type = "Task" [ 548.979082] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.990544] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a1a342-1191-68e8-dc35-b0f54981e65b, 'name': SearchDatastore_Task, 'duration_secs': 0.00994} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.990883] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.991071] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] a33da17c-bbb2-4307-b4b3-56cec5cb757e/a33da17c-bbb2-4307-b4b3-56cec5cb757e.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 548.991284] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1141c41-69e0-435c-a19f-fc99ff8cd25d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.000106] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 549.000106] env[62923]: value = "task-1369839" [ 549.000106] env[62923]: _type = "Task" [ 549.000106] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.009364] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369839, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.016162] env[62923]: DEBUG nova.network.neutron [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 549.112040] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91edf981-2f37-4fa5-a0a4-710d67a7b7d9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.122723] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373cb709-8282-4e79-b86b-8bbfc2ce2b3a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.154643] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4b863f-684d-4ce2-ba41-f47995f5ec95 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.162772] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a36d955-8c8d-4dc4-8a27-073c9c1672c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.179276] env[62923]: DEBUG nova.compute.provider_tree [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.323224] env[62923]: DEBUG nova.network.neutron [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.339451] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Releasing lock "refresh_cache-e0ce4383-cade-4d85-a8a5-2437b9203d98" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.342311] env[62923]: DEBUG nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 549.342311] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 549.342311] env[62923]: DEBUG oslo_concurrency.lockutils [req-26df7383-1916-4766-9727-5645e6821b59 req-2246bac3-3137-401d-a536-119fa9ffb8a9 service nova] Acquired lock "refresh_cache-e0ce4383-cade-4d85-a8a5-2437b9203d98" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.342311] env[62923]: DEBUG nova.network.neutron [req-26df7383-1916-4766-9727-5645e6821b59 req-2246bac3-3137-401d-a536-119fa9ffb8a9 service nova] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Refreshing network info cache for port 51b8166c-7348-41f6-809a-c827f13e7dbe {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 549.342311] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d68e38e5-11ce-421a-a67b-1f175bffbaef {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.360922] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f66cb1-b092-4457-9e23-33cee06d3e7f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.388488] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e0ce4383-cade-4d85-a8a5-2437b9203d98 could not be found. [ 549.388735] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 549.388915] env[62923]: INFO nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Took 0.05 seconds to destroy the instance on the hypervisor. [ 549.389615] env[62923]: DEBUG oslo.service.loopingcall [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 549.390024] env[62923]: DEBUG nova.compute.manager [-] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 549.390139] env[62923]: DEBUG nova.network.neutron [-] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 549.439398] env[62923]: DEBUG nova.network.neutron [-] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 549.511917] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369839, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.598025] env[62923]: DEBUG nova.compute.manager [req-9cdfea6d-c85b-462e-995b-8a4d3d3fa969 req-2d73ce7f-8b69-4c69-9886-ede0912999b8 service nova] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Received event network-changed-8137e2da-4915-439e-9eec-7b3b901c8237 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 549.598025] env[62923]: DEBUG nova.compute.manager [req-9cdfea6d-c85b-462e-995b-8a4d3d3fa969 req-2d73ce7f-8b69-4c69-9886-ede0912999b8 service nova] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Refreshing instance network info cache due to event network-changed-8137e2da-4915-439e-9eec-7b3b901c8237. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 549.598025] env[62923]: DEBUG oslo_concurrency.lockutils [req-9cdfea6d-c85b-462e-995b-8a4d3d3fa969 req-2d73ce7f-8b69-4c69-9886-ede0912999b8 service nova] Acquiring lock "refresh_cache-3cb4806d-dffa-4c41-9730-f29d2aad059a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.686022] env[62923]: DEBUG nova.scheduler.client.report [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 549.831026] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Releasing lock "refresh_cache-3cb4806d-dffa-4c41-9730-f29d2aad059a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.831026] env[62923]: DEBUG nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 549.831026] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 549.831905] env[62923]: DEBUG oslo_concurrency.lockutils [req-9cdfea6d-c85b-462e-995b-8a4d3d3fa969 req-2d73ce7f-8b69-4c69-9886-ede0912999b8 service nova] Acquired lock "refresh_cache-3cb4806d-dffa-4c41-9730-f29d2aad059a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.832269] env[62923]: DEBUG nova.network.neutron [req-9cdfea6d-c85b-462e-995b-8a4d3d3fa969 req-2d73ce7f-8b69-4c69-9886-ede0912999b8 service nova] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Refreshing network info cache for port 8137e2da-4915-439e-9eec-7b3b901c8237 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 549.835207] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9a90b7f1-a0be-40ec-95bc-53c1e9efa817 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.851484] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca6f0a5-fd59-4cbb-8057-4061ac26e677 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.892099] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-420844ca-7e2b-49c4-9d1f-da119b804662 
tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3cb4806d-dffa-4c41-9730-f29d2aad059a could not be found. [ 549.893320] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 549.893406] env[62923]: INFO nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Took 0.06 seconds to destroy the instance on the hypervisor. [ 549.894037] env[62923]: DEBUG oslo.service.loopingcall [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 549.896587] env[62923]: DEBUG nova.compute.manager [-] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 549.896690] env[62923]: DEBUG nova.network.neutron [-] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 549.934403] env[62923]: DEBUG nova.network.neutron [req-26df7383-1916-4766-9727-5645e6821b59 req-2246bac3-3137-401d-a536-119fa9ffb8a9 service nova] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 549.939326] env[62923]: DEBUG nova.network.neutron [-] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 549.941919] env[62923]: DEBUG nova.network.neutron [-] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.012494] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369839, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573651} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.013246] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] a33da17c-bbb2-4307-b4b3-56cec5cb757e/a33da17c-bbb2-4307-b4b3-56cec5cb757e.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 550.013493] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 550.013749] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-00628033-342e-4707-a1c1-3563e389d276 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.023672] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 550.023672] env[62923]: value = "task-1369840" [ 550.023672] env[62923]: _type = "Task" [ 550.023672] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.029872] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369840, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.190098] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 550.190383] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.916s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.190661] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.641s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.251279] env[62923]: DEBUG nova.network.neutron [req-26df7383-1916-4766-9727-5645e6821b59 req-2246bac3-3137-401d-a536-119fa9ffb8a9 service nova] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.374516] env[62923]: DEBUG nova.network.neutron [req-9cdfea6d-c85b-462e-995b-8a4d3d3fa969 req-2d73ce7f-8b69-4c69-9886-ede0912999b8 service nova] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 550.442423] env[62923]: DEBUG nova.network.neutron [-] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.451142] env[62923]: INFO nova.compute.manager [-] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Took 1.06 seconds to deallocate network for instance. [ 550.453803] env[62923]: DEBUG nova.compute.claims [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 550.453803] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.532569] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369840, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070695} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.533832] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 550.533832] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d731e701-c596-454e-8bd6-5be24ff7ba79 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.557323] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] a33da17c-bbb2-4307-b4b3-56cec5cb757e/a33da17c-bbb2-4307-b4b3-56cec5cb757e.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 550.557680] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d08ba7ac-118c-4afb-a42f-eb3ae49c8709 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.580834] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 550.580834] env[62923]: value = "task-1369841" [ 550.580834] env[62923]: _type = "Task" [ 550.580834] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.589412] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369841, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.643121] env[62923]: DEBUG nova.network.neutron [req-9cdfea6d-c85b-462e-995b-8a4d3d3fa969 req-2d73ce7f-8b69-4c69-9886-ede0912999b8 service nova] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.754735] env[62923]: DEBUG oslo_concurrency.lockutils [req-26df7383-1916-4766-9727-5645e6821b59 req-2246bac3-3137-401d-a536-119fa9ffb8a9 service nova] Releasing lock "refresh_cache-e0ce4383-cade-4d85-a8a5-2437b9203d98" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.820731] env[62923]: ERROR nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 63c15049-830e-4a51-bf4e-e3cfee34896a, please check neutron logs for more information. 
[ 550.820731] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 550.820731] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.820731] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 550.820731] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 550.820731] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 550.820731] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 550.820731] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 550.820731] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.820731] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 550.820731] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.820731] env[62923]: ERROR nova.compute.manager raise self.value [ 550.820731] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 550.820731] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 550.820731] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.820731] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 550.821244] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.821244] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 550.821244] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 63c15049-830e-4a51-bf4e-e3cfee34896a, please check neutron logs for more information. 
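The __exit__ / force_reraise() / raise self.value frames in the traceback above are the signature of oslo.utils' save_and_reraise_exception helper: the original PortBindingFailed is raised inside a try block, cleanup runs inside the context manager in the except path, and the saved exception is then re-raised with its original traceback (the bare greenthread traceback that follows repeats the same frames from the eventlet hub's point of view). A minimal sketch of the pattern, with hypothetical update_port and unbind_ports callables standing in for Nova's internals:

    from oslo_utils import excutils

    def update_ports(ports, update_port, unbind_ports):
        # Sketch only: update_port/unbind_ports are illustrative stand-ins,
        # not Nova's actual signatures.
        created = []
        try:
            for port in ports:
                created.append(update_port(port))  # may raise PortBindingFailed
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup happens here; when the context manager exits it
                # re-raises the saved exception via force_reraise(), which is
                # exactly the __exit__ -> force_reraise -> raise self.value
                # chain visible in the log.
                unbind_ports(created)
        return created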
[ 550.821244] env[62923]: ERROR nova.compute.manager [ 550.821244] env[62923]: Traceback (most recent call last): [ 550.821244] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 550.821244] env[62923]: listener.cb(fileno) [ 550.821244] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.821244] env[62923]: result = function(*args, **kwargs) [ 550.821244] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 550.821244] env[62923]: return func(*args, **kwargs) [ 550.821244] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 550.821244] env[62923]: raise e [ 550.821244] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.821244] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 550.821244] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 550.821244] env[62923]: created_port_ids = self._update_ports_for_instance( [ 550.821244] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 550.821244] env[62923]: with excutils.save_and_reraise_exception(): [ 550.821244] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.821244] env[62923]: self.force_reraise() [ 550.821244] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.821244] env[62923]: raise self.value [ 550.821244] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 550.821244] env[62923]: updated_port = self._update_port( [ 550.821244] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.821244] env[62923]: _ensure_no_port_binding_failure(port) [ 550.821244] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.821244] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 550.822947] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 63c15049-830e-4a51-bf4e-e3cfee34896a, please check neutron logs for more information. [ 550.822947] env[62923]: Removing descriptor: 17 [ 550.822947] env[62923]: ERROR nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 63c15049-830e-4a51-bf4e-e3cfee34896a, please check neutron logs for more information. 
[ 550.822947] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Traceback (most recent call last): [ 550.822947] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 550.822947] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] yield resources [ 550.822947] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 550.822947] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] self.driver.spawn(context, instance, image_meta, [ 550.822947] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 550.822947] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 550.822947] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 550.822947] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] vm_ref = self.build_virtual_machine(instance, [ 550.823660] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 550.823660] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] vif_infos = vmwarevif.get_vif_info(self._session, [ 550.823660] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 550.823660] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] for vif in network_info: [ 550.823660] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 550.823660] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] return self._sync_wrapper(fn, *args, **kwargs) [ 550.823660] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 550.823660] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] self.wait() [ 550.823660] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 550.823660] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] self[:] = self._gt.wait() [ 550.823660] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 550.823660] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] return self._exit_event.wait() [ 550.823660] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 550.824057] env[62923]: ERROR 
nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] result = hub.switch() [ 550.824057] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 550.824057] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] return self.greenlet.switch() [ 550.824057] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.824057] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] result = function(*args, **kwargs) [ 550.824057] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 550.824057] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] return func(*args, **kwargs) [ 550.824057] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 550.824057] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] raise e [ 550.824057] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.824057] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] nwinfo = self.network_api.allocate_for_instance( [ 550.824057] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 550.824057] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] created_port_ids = self._update_ports_for_instance( [ 550.824468] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 550.824468] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] with excutils.save_and_reraise_exception(): [ 550.824468] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.824468] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] self.force_reraise() [ 550.824468] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.824468] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] raise self.value [ 550.824468] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 550.824468] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] updated_port = self._update_port( [ 550.824468] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.824468] 
env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] _ensure_no_port_binding_failure(port) [ 550.824468] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 550.824468] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] raise exception.PortBindingFailed(port_id=port['id']) [ 550.824861] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] nova.exception.PortBindingFailed: Binding failed for port 63c15049-830e-4a51-bf4e-e3cfee34896a, please check neutron logs for more information. [ 550.824861] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] [ 550.824861] env[62923]: INFO nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Terminating instance [ 550.832319] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Acquiring lock "refresh_cache-4fe36f05-d730-4fb1-ab05-0425be619dfb" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.832319] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Acquired lock "refresh_cache-4fe36f05-d730-4fb1-ab05-0425be619dfb" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.832319] env[62923]: DEBUG nova.network.neutron [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 550.941389] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e925f6-710a-4cfc-8717-504ebfb9f117 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.951844] env[62923]: INFO nova.compute.manager [-] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Took 1.06 seconds to deallocate network for instance. 
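Both instances above follow the same teardown path once their builds fail: the backend destroy is a no-op (InstanceNotFound is caught and logged as "Instance destroyed"), and network deallocation is wrapped in a retrying helper, _deallocate_network_with_retries, driven by oslo.service's loopingcall module ("Waiting for function ... to return"). A hedged sketch of that retry style using loopingcall.RetryDecorator; the retry counts and exception types here are illustrative assumptions, not Nova's actual configuration:

    from oslo_service import loopingcall

    def release_ports():
        # Hypothetical stand-in for the real deallocation call; picture it
        # raising ConnectionError while the network service is briefly away.
        pass

    # Retries release_ports() on ConnectionError with growing sleeps
    # (2s, 4s, ... capped at 10s) for up to 3 retries, then re-raises.
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=10,
                                exceptions=(ConnectionError,))
    def deallocate_network_with_retries():
        release_ports()

    deallocate_network_with_retries()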
[ 550.954934] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d0fb32-342e-403e-b6b0-0a9379ef445b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.959617] env[62923]: DEBUG nova.compute.claims [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 550.960262] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.992809] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89cf9d6-0ecc-49a7-9a80-79f939d07a6d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.000248] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1a1c7a-ce3a-4f5a-a7a0-aec83bd1c81b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.013413] env[62923]: DEBUG nova.compute.provider_tree [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.096283] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369841, 'name': ReconfigVM_Task, 'duration_secs': 0.283438} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.096584] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Reconfigured VM instance instance-00000002 to attach disk [datastore2] a33da17c-bbb2-4307-b4b3-56cec5cb757e/a33da17c-bbb2-4307-b4b3-56cec5cb757e.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 551.097245] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93da43e6-e9bb-4ca8-9ee6-16ae44428428 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.103553] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 551.103553] env[62923]: value = "task-1369842" [ 551.103553] env[62923]: _type = "Task" [ 551.103553] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.111937] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369842, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.146041] env[62923]: DEBUG oslo_concurrency.lockutils [req-9cdfea6d-c85b-462e-995b-8a4d3d3fa969 req-2d73ce7f-8b69-4c69-9886-ede0912999b8 service nova] Releasing lock "refresh_cache-3cb4806d-dffa-4c41-9730-f29d2aad059a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.375044] env[62923]: DEBUG nova.network.neutron [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.516682] env[62923]: DEBUG nova.scheduler.client.report [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 551.567777] env[62923]: DEBUG nova.network.neutron [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.621776] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369842, 'name': Rename_Task, 'duration_secs': 0.142838} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.622063] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 551.623115] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e49a65fe-08b8-4d27-a189-d60e60cc4b5b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.632217] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 551.632217] env[62923]: value = "task-1369843" [ 551.632217] env[62923]: _type = "Task" [ 551.632217] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.640412] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369843, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.722835] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "147165a4-9071-4516-9498-fa4c706a5e37" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.723058] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "147165a4-9071-4516-9498-fa4c706a5e37" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.736990] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Acquiring lock "2a8c7285-35dd-4112-b84a-ea384aead074" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.737261] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Lock "2a8c7285-35dd-4112-b84a-ea384aead074" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.024501] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 
tempest-DeleteServersAdminTestJSON-1531813717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.833s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.024787] env[62923]: ERROR nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6bcf7b58-225b-48db-aaef-834cc06a36b1, please check neutron logs for more information. [ 552.024787] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Traceback (most recent call last): [ 552.024787] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 552.024787] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] self.driver.spawn(context, instance, image_meta, [ 552.024787] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 552.024787] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] self._vmops.spawn(context, instance, image_meta, injected_files, [ 552.024787] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 552.024787] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] vm_ref = self.build_virtual_machine(instance, [ 552.024787] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 552.024787] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] vif_infos = vmwarevif.get_vif_info(self._session, [ 552.024787] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 552.025111] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] for vif in network_info: [ 552.025111] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 552.025111] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] return self._sync_wrapper(fn, *args, **kwargs) [ 552.025111] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 552.025111] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] self.wait() [ 552.025111] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 552.025111] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] self[:] = self._gt.wait() [ 552.025111] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 552.025111] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] return self._exit_event.wait() [ 552.025111] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 552.025111] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] result = hub.switch() [ 552.025111] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 552.025111] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] return self.greenlet.switch() [ 552.025453] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.025453] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] result = function(*args, **kwargs) [ 552.025453] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 552.025453] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] return func(*args, **kwargs) [ 552.025453] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 552.025453] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] raise e [ 552.025453] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.025453] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] nwinfo = self.network_api.allocate_for_instance( [ 552.025453] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.025453] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] created_port_ids = self._update_ports_for_instance( [ 552.025453] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 552.025453] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] with excutils.save_and_reraise_exception(): [ 552.025453] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.025831] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] self.force_reraise() [ 552.025831] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.025831] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] raise self.value [ 552.025831] env[62923]: ERROR nova.compute.manager [instance: 
795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.025831] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] updated_port = self._update_port( [ 552.025831] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.025831] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] _ensure_no_port_binding_failure(port) [ 552.025831] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.025831] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] raise exception.PortBindingFailed(port_id=port['id']) [ 552.025831] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] nova.exception.PortBindingFailed: Binding failed for port 6bcf7b58-225b-48db-aaef-834cc06a36b1, please check neutron logs for more information. [ 552.025831] env[62923]: ERROR nova.compute.manager [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] [ 552.026915] env[62923]: DEBUG nova.compute.utils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Binding failed for port 6bcf7b58-225b-48db-aaef-834cc06a36b1, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 552.030016] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.950s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.031942] env[62923]: INFO nova.compute.claims [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 552.034916] env[62923]: DEBUG nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Build of instance 795b645d-3aee-4dd8-9537-2277f86c5b10 was re-scheduled: Binding failed for port 6bcf7b58-225b-48db-aaef-834cc06a36b1, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 552.037462] env[62923]: DEBUG nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 552.037462] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquiring lock "refresh_cache-795b645d-3aee-4dd8-9537-2277f86c5b10" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.037462] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquired lock "refresh_cache-795b645d-3aee-4dd8-9537-2277f86c5b10" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.037462] env[62923]: DEBUG nova.network.neutron [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 552.078442] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Releasing lock "refresh_cache-4fe36f05-d730-4fb1-ab05-0425be619dfb" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.078949] env[62923]: DEBUG nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 552.079338] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 552.079639] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34197e2a-c26d-4500-bb83-ac3fdcf5ff53 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.091218] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d46b901-c3e1-406d-89f2-2e1e3864bad0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.124649] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4fe36f05-d730-4fb1-ab05-0425be619dfb could not be found. [ 552.125303] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 552.125303] env[62923]: INFO nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Took 0.05 seconds to destroy the instance on the hypervisor. [ 552.125303] env[62923]: DEBUG oslo.service.loopingcall [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 552.125540] env[62923]: DEBUG nova.compute.manager [-] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 552.125634] env[62923]: DEBUG nova.network.neutron [-] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 552.142047] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369843, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.170729] env[62923]: DEBUG nova.network.neutron [-] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 552.189203] env[62923]: ERROR nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2bbe3814-1872-4090-baef-b83249364387, please check neutron logs for more information. [ 552.189203] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 552.189203] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.189203] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 552.189203] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.189203] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 552.189203] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 552.189203] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 552.189203] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.189203] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 552.189203] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.189203] env[62923]: ERROR nova.compute.manager raise self.value [ 552.189203] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.189203] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 552.189203] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.189203] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 552.189627] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.189627] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 552.189627] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2bbe3814-1872-4090-baef-b83249364387, please check neutron logs for more information. 
[ 552.189627] env[62923]: ERROR nova.compute.manager [ 552.189627] env[62923]: Traceback (most recent call last): [ 552.189627] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 552.189627] env[62923]: listener.cb(fileno) [ 552.189627] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.189627] env[62923]: result = function(*args, **kwargs) [ 552.189627] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 552.189627] env[62923]: return func(*args, **kwargs) [ 552.189627] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 552.189627] env[62923]: raise e [ 552.189627] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.189627] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 552.189627] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.189627] env[62923]: created_port_ids = self._update_ports_for_instance( [ 552.189627] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 552.189627] env[62923]: with excutils.save_and_reraise_exception(): [ 552.189627] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.189627] env[62923]: self.force_reraise() [ 552.189627] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.189627] env[62923]: raise self.value [ 552.189627] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.189627] env[62923]: updated_port = self._update_port( [ 552.189627] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.189627] env[62923]: _ensure_no_port_binding_failure(port) [ 552.189627] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.189627] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 552.190505] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 2bbe3814-1872-4090-baef-b83249364387, please check neutron logs for more information. [ 552.190505] env[62923]: Removing descriptor: 21 [ 552.198637] env[62923]: ERROR nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2bbe3814-1872-4090-baef-b83249364387, please check neutron logs for more information. 
[ 552.198637] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Traceback (most recent call last): [ 552.198637] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 552.198637] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] yield resources [ 552.198637] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 552.198637] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] self.driver.spawn(context, instance, image_meta, [ 552.198637] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 552.198637] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 552.198637] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 552.198637] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] vm_ref = self.build_virtual_machine(instance, [ 552.198637] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 552.198989] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] vif_infos = vmwarevif.get_vif_info(self._session, [ 552.198989] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 552.198989] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] for vif in network_info: [ 552.198989] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 552.198989] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] return self._sync_wrapper(fn, *args, **kwargs) [ 552.198989] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 552.198989] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] self.wait() [ 552.198989] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 552.198989] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] self[:] = self._gt.wait() [ 552.198989] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 552.198989] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] return self._exit_event.wait() [ 552.198989] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 552.198989] env[62923]: ERROR 
nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] result = hub.switch() [ 552.199359] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 552.199359] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] return self.greenlet.switch() [ 552.199359] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.199359] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] result = function(*args, **kwargs) [ 552.199359] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 552.199359] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] return func(*args, **kwargs) [ 552.199359] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 552.199359] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] raise e [ 552.199359] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.199359] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] nwinfo = self.network_api.allocate_for_instance( [ 552.199359] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 552.199359] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] created_port_ids = self._update_ports_for_instance( [ 552.199359] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 552.199988] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] with excutils.save_and_reraise_exception(): [ 552.199988] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.199988] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] self.force_reraise() [ 552.199988] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.199988] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] raise self.value [ 552.199988] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 552.199988] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] updated_port = self._update_port( [ 552.199988] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.199988] 
env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] _ensure_no_port_binding_failure(port) [ 552.199988] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.199988] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] raise exception.PortBindingFailed(port_id=port['id']) [ 552.199988] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] nova.exception.PortBindingFailed: Binding failed for port 2bbe3814-1872-4090-baef-b83249364387, please check neutron logs for more information. [ 552.199988] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] [ 552.200556] env[62923]: INFO nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Terminating instance [ 552.200556] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquiring lock "refresh_cache-04d96512-dc09-42ff-96d0-961f7359318c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.200556] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquired lock "refresh_cache-04d96512-dc09-42ff-96d0-961f7359318c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.200556] env[62923]: DEBUG nova.network.neutron [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 552.568657] env[62923]: DEBUG nova.network.neutron [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 552.644718] env[62923]: DEBUG oslo_vmware.api [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369843, 'name': PowerOnVM_Task, 'duration_secs': 0.65144} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.646113] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 552.646113] env[62923]: DEBUG nova.compute.manager [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 552.648798] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7453c869-e6cd-4596-ae50-5d138afe2e8d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.676205] env[62923]: DEBUG nova.network.neutron [-] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.830305] env[62923]: DEBUG nova.network.neutron [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 552.873831] env[62923]: DEBUG nova.network.neutron [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.883843] env[62923]: DEBUG nova.compute.manager [req-d99c966a-1a17-4002-bc55-e6a7db2b8576 req-8318ee52-058d-4d39-b194-5d65c5ce632d service nova] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Received event network-vif-deleted-51b8166c-7348-41f6-809a-c827f13e7dbe {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 552.943011] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Acquiring lock "ab0f02a1-f883-4ad6-8f8c-5c300fff0f70" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.943273] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Lock "ab0f02a1-f883-4ad6-8f8c-5c300fff0f70" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.948477] env[62923]: DEBUG nova.network.neutron [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838
tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.167353] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.180207] env[62923]: INFO nova.compute.manager [-] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Took 1.05 seconds to deallocate network for instance. [ 553.185028] env[62923]: DEBUG nova.compute.claims [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 553.185241] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.376646] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e25a28d-0013-4503-8f3a-7e8cd29d9100 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.382029] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Releasing lock "refresh_cache-795b645d-3aee-4dd8-9537-2277f86c5b10" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.382323] env[62923]: DEBUG nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 553.384909] env[62923]: DEBUG nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 553.384909] env[62923]: DEBUG nova.network.neutron [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 553.393073] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d47e5c3-5d92-4930-989e-dec360995a81 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.428901] env[62923]: DEBUG nova.network.neutron [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.430667] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9925f09-69e0-4d60-919c-13bc9c133073 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.438559] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73161d0-8a80-4e05-a841-f7ce206c0432 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.452789] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Releasing lock "refresh_cache-04d96512-dc09-42ff-96d0-961f7359318c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.453285] env[62923]: DEBUG nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 553.453495] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 553.453955] env[62923]: DEBUG nova.compute.provider_tree [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 553.455289] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a843c5e5-a3a6-46f2-a54c-c6f70e2c65d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.464594] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44350aca-d50f-45db-aa50-1aa84c350cf7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.492922] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 04d96512-dc09-42ff-96d0-961f7359318c could not be found. [ 553.492922] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 553.492922] env[62923]: INFO nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 553.492922] env[62923]: DEBUG oslo.service.loopingcall [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.492922] env[62923]: DEBUG nova.compute.manager [-] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 553.492922] env[62923]: DEBUG nova.network.neutron [-] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 553.523808] env[62923]: DEBUG nova.network.neutron [-] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Instance cache missing network info.
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.917320] env[62923]: DEBUG nova.compute.manager [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Received event network-vif-deleted-8137e2da-4915-439e-9eec-7b3b901c8237 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 553.917407] env[62923]: DEBUG nova.compute.manager [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Received event network-changed-63c15049-830e-4a51-bf4e-e3cfee34896a {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 553.918565] env[62923]: DEBUG nova.compute.manager [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Refreshing instance network info cache due to event network-changed-63c15049-830e-4a51-bf4e-e3cfee34896a. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 553.918855] env[62923]: DEBUG oslo_concurrency.lockutils [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] Acquiring lock "refresh_cache-4fe36f05-d730-4fb1-ab05-0425be619dfb" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.918953] env[62923]: DEBUG oslo_concurrency.lockutils [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] Acquired lock "refresh_cache-4fe36f05-d730-4fb1-ab05-0425be619dfb" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.919111] env[62923]: DEBUG nova.network.neutron [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Refreshing network info cache for port 63c15049-830e-4a51-bf4e-e3cfee34896a {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 553.932299] env[62923]: DEBUG nova.compute.manager [None req-2d114350-0910-473b-9054-6f50353c333b tempest-ServerDiagnosticsV248Test-472437528 tempest-ServerDiagnosticsV248Test-472437528-project-admin] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 553.934133] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccc9972-2353-4b89-badd-4d5c8f0b48e9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.937107] env[62923]: DEBUG nova.network.neutron [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.945080] env[62923]: INFO nova.compute.manager [None req-2d114350-0910-473b-9054-6f50353c333b tempest-ServerDiagnosticsV248Test-472437528 tempest-ServerDiagnosticsV248Test-472437528-project-admin] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Retrieving diagnostics [ 553.945995] env[62923]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3d8a69-eaa2-4486-a597-5a7ca454bfe8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.979845] env[62923]: DEBUG nova.scheduler.client.report [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 554.026273] env[62923]: DEBUG nova.network.neutron [-] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.441289] env[62923]: INFO nova.compute.manager [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: 795b645d-3aee-4dd8-9537-2277f86c5b10] Took 1.06 seconds to deallocate network for instance. [ 554.463965] env[62923]: DEBUG nova.network.neutron [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 554.488113] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.460s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.488667] env[62923]: DEBUG nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 554.494782] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.195s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.530130] env[62923]: INFO nova.compute.manager [-] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Took 1.04 seconds to deallocate network for instance. 
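The inventory record a few lines above lists, per resource class, the totals, reservations, and allocation ratios that placement works from; the schedulable capacity of each class comes out to (total - reserved) * allocation_ratio, while max_unit (16 VCPUs here) still caps what a single instance may request. A short worked example with the same numbers, in plain Python:

    # Schedulable capacity per resource class, using placement's formula
    # capacity = (total - reserved) * allocation_ratio; the numbers are
    # taken from the inventory record above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

So the host's 48 physical VCPUs advertise 192 schedulable units at the 4.0 overcommit ratio, and the 512 MB memory reservation is taken off the top before the 1.0 ratio is applied.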
[ 554.535961] env[62923]: DEBUG nova.compute.claims [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 554.536163] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.583725] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Acquiring lock "353b72b2-cd56-442f-9010-c75baf8f5a48" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.583725] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Lock "353b72b2-cd56-442f-9010-c75baf8f5a48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.593626] env[62923]: DEBUG nova.network.neutron [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.005027] env[62923]: DEBUG nova.compute.utils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 555.007864] env[62923]: DEBUG nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Allocating IP information in the background.
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 555.007864] env[62923]: DEBUG nova.network.neutron [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 555.097560] env[62923]: DEBUG oslo_concurrency.lockutils [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] Releasing lock "refresh_cache-4fe36f05-d730-4fb1-ab05-0425be619dfb" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.097560] env[62923]: DEBUG nova.compute.manager [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Received event network-vif-deleted-63c15049-830e-4a51-bf4e-e3cfee34896a {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 555.097560] env[62923]: DEBUG nova.compute.manager [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Received event network-changed-2bbe3814-1872-4090-baef-b83249364387 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 555.097560] env[62923]: DEBUG nova.compute.manager [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Refreshing instance network info cache due to event network-changed-2bbe3814-1872-4090-baef-b83249364387. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 555.097560] env[62923]: DEBUG oslo_concurrency.lockutils [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] Acquiring lock "refresh_cache-04d96512-dc09-42ff-96d0-961f7359318c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.097949] env[62923]: DEBUG oslo_concurrency.lockutils [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] Acquired lock "refresh_cache-04d96512-dc09-42ff-96d0-961f7359318c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.097949] env[62923]: DEBUG nova.network.neutron [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Refreshing network info cache for port 2bbe3814-1872-4090-baef-b83249364387 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 555.131164] env[62923]: DEBUG nova.policy [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1d0006f37a245429e9a2d890d9ed438', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e4e0c7bf0f94416bd94174df3d0c3f8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 555.286247] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb9cd90-df71-4040-b2c6-7bc495c3da51 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.294299] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b2058d-b727-4ae3-92f2-5caa4dabcc1b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.328536] env[62923]: INFO nova.compute.manager [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Rebuilding instance [ 555.332124] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d835efb-ace6-4f8f-87f8-da502a4363c9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.342840] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4efeee0-3322-4b91-a8e4-61700817f70b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.355524] env[62923]: DEBUG nova.compute.provider_tree [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.387951] env[62923]: DEBUG nova.compute.manager [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 555.388608] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10af590d-7e82-4f81-b92b-27e04020a57c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.491243] env[62923]: INFO nova.scheduler.client.report [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Deleted allocations for instance 795b645d-3aee-4dd8-9537-2277f86c5b10 [ 555.509423] env[62923]: DEBUG nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 555.638982] env[62923]: DEBUG nova.network.neutron [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 555.860147] env[62923]: DEBUG nova.scheduler.client.report [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 555.894831] env[62923]: DEBUG nova.network.neutron [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.900467] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 555.900671] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88b4e840-b44a-4afa-bffe-323370a078b4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.912663] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 
tempest-ServersAdmin275Test-973179173-project-admin] Waiting for the task: (returnval){ [ 555.912663] env[62923]: value = "task-1369844" [ 555.912663] env[62923]: _type = "Task" [ 555.912663] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.925077] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369844, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.948275] env[62923]: DEBUG nova.network.neutron [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Successfully created port: aa99e258-e40f-4e1b-8337-168e3553f6ce {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 556.001711] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e55c35b8-dac2-4c56-bf21-8631bd3fa7fd tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Lock "795b645d-3aee-4dd8-9537-2277f86c5b10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 31.141s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.080785] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Acquiring lock "03ee0097-1200-43ce-9baa-e9da80105516" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.081038] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Lock "03ee0097-1200-43ce-9baa-e9da80105516" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.367850] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.871s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.367850] env[62923]: ERROR nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a44148b3-774a-48ee-acac-8d757051c452, please check neutron logs for more information.
[ 556.367850] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] Traceback (most recent call last): [ 556.367850] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 556.367850] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] self.driver.spawn(context, instance, image_meta, [ 556.367850] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 556.367850] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 556.367850] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 556.367850] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] vm_ref = self.build_virtual_machine(instance, [ 556.368134] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 556.368134] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] vif_infos = vmwarevif.get_vif_info(self._session, [ 556.368134] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 556.368134] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] for vif in network_info: [ 556.368134] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 556.368134] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] return self._sync_wrapper(fn, *args, **kwargs) [ 556.368134] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 556.368134] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] self.wait() [ 556.368134] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 556.368134] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] self[:] = self._gt.wait() [ 556.368134] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 556.368134] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] return self._exit_event.wait() [ 556.368134] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 556.368447] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] result = hub.switch() [ 556.368447] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
556.368447] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] return self.greenlet.switch() [ 556.368447] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 556.368447] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] result = function(*args, **kwargs) [ 556.368447] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 556.368447] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] return func(*args, **kwargs) [ 556.368447] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 556.368447] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] raise e [ 556.368447] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 556.368447] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] nwinfo = self.network_api.allocate_for_instance( [ 556.368447] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 556.368447] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] created_port_ids = self._update_ports_for_instance( [ 556.368759] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 556.368759] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] with excutils.save_and_reraise_exception(): [ 556.368759] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.368759] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] self.force_reraise() [ 556.368759] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.368759] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] raise self.value [ 556.368759] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 556.368759] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] updated_port = self._update_port( [ 556.368759] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.368759] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] _ensure_no_port_binding_failure(port) [ 556.368759] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 556.368759] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] raise exception.PortBindingFailed(port_id=port['id']) [ 556.369113] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] nova.exception.PortBindingFailed: Binding failed for port a44148b3-774a-48ee-acac-8d757051c452, please check neutron logs for more information. [ 556.369113] env[62923]: ERROR nova.compute.manager [instance: f81af398-7382-4433-9a24-07d16fd1223b] [ 556.369113] env[62923]: DEBUG nova.compute.utils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Binding failed for port a44148b3-774a-48ee-acac-8d757051c452, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 556.371924] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.605s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.373813] env[62923]: INFO nova.compute.claims [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 556.377302] env[62923]: DEBUG nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Build of instance f81af398-7382-4433-9a24-07d16fd1223b was re-scheduled: Binding failed for port a44148b3-774a-48ee-acac-8d757051c452, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 556.377858] env[62923]: DEBUG nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 556.377985] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Acquiring lock "refresh_cache-f81af398-7382-4433-9a24-07d16fd1223b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.378173] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Acquired lock "refresh_cache-f81af398-7382-4433-9a24-07d16fd1223b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.378361] env[62923]: DEBUG nova.network.neutron [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 556.398803] env[62923]: DEBUG oslo_concurrency.lockutils [req-04e8c7e7-5637-4cf9-9a7b-cfebf8d063df req-6aa7306b-648f-4d29-8ee6-78c15b32fd32 service nova] Releasing lock "refresh_cache-04d96512-dc09-42ff-96d0-961f7359318c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.430682] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369844, 'name': PowerOffVM_Task, 'duration_secs': 0.129501} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.430682] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 556.430682] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 556.434616] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7a7c39-07e0-49fd-b329-0f17dd2d6679 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.442979] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 556.442979] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6dd8e226-9218-42ff-ba64-931d6c1c17a9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.472896] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 556.472896] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 556.473067] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Deleting the datastore file [datastore2] a33da17c-bbb2-4307-b4b3-56cec5cb757e {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 556.473487] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-323e8ce4-4abf-4ac7-a608-4ef7f668c860 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.482222] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Waiting for the task: (returnval){ [ 556.482222] env[62923]: value = "task-1369846" [ 556.482222] env[62923]: _type = "Task" [ 556.482222] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.494319] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369846, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.505203] env[62923]: DEBUG nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 556.523074] env[62923]: DEBUG nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 556.558158] env[62923]: DEBUG nova.virt.hardware [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 556.558462] env[62923]: DEBUG nova.virt.hardware [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 556.558550] env[62923]: DEBUG nova.virt.hardware [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 556.558846] env[62923]: DEBUG nova.virt.hardware [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 556.558904] env[62923]: DEBUG nova.virt.hardware [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 556.558992] env[62923]: DEBUG nova.virt.hardware [None
req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 556.559744] env[62923]: DEBUG nova.virt.hardware [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 556.560034] env[62923]: DEBUG nova.virt.hardware [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 556.560347] env[62923]: DEBUG nova.virt.hardware [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 556.560586] env[62923]: DEBUG nova.virt.hardware [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 556.560818] env[62923]: DEBUG nova.virt.hardware [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 556.562596] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968a30b7-a28b-43c9-b71a-b8e6f7b6c9ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.573512] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db06416c-247d-416e-8166-4fac85996dd2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.974208] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Acquiring lock "6ca62d1b-9533-4b83-8e8a-7f62a34c90a3" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.975568] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Lock "6ca62d1b-9533-4b83-8e8a-7f62a34c90a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.975568] env[62923]: DEBUG
oslo_concurrency.lockutils [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Acquiring lock "6ca62d1b-9533-4b83-8e8a-7f62a34c90a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.975568] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Lock "6ca62d1b-9533-4b83-8e8a-7f62a34c90a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.975568] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Lock "6ca62d1b-9533-4b83-8e8a-7f62a34c90a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.978899] env[62923]: INFO nova.compute.manager [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Terminating instance [ 556.985301] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Acquiring lock "refresh_cache-6ca62d1b-9533-4b83-8e8a-7f62a34c90a3" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.985301] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Acquired lock "refresh_cache-6ca62d1b-9533-4b83-8e8a-7f62a34c90a3" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.985301] env[62923]: DEBUG nova.network.neutron [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 557.001124] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369846, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092967} completed successfully.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.002559] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 557.002559] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 557.002559] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 557.045110] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.050681] env[62923]: DEBUG nova.network.neutron [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 557.263925] env[62923]: DEBUG nova.network.neutron [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.522139] env[62923]: DEBUG nova.network.neutron [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 557.649187] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5863caa-a567-4690-b3af-60303db156fd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.652655] env[62923]: DEBUG nova.network.neutron [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.658587] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92389db-9ce3-457c-94cf-aef341ed4653 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.692202] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235d6c36-7fc2-477a-840f-1e5a3dbd3f02 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.699446] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3026461-7f9b-404b-a9a0-a836ab86ba13 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.712747] env[62923]: DEBUG nova.compute.provider_tree [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 557.768648] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Releasing lock "refresh_cache-f81af398-7382-4433-9a24-07d16fd1223b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.768648] env[62923]: DEBUG nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 557.771019] env[62923]: DEBUG nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 557.771019] env[62923]: DEBUG nova.network.neutron [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 557.795804] env[62923]: DEBUG nova.network.neutron [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 557.980823] env[62923]: DEBUG nova.compute.manager [req-a889cfe2-33f2-4a24-a5a8-068850b1e77d req-20570e96-519f-4dce-95a3-0086d2557d3b service nova] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Received event network-vif-deleted-2bbe3814-1872-4090-baef-b83249364387 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 558.042196] env[62923]: DEBUG nova.virt.hardware [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 558.042503] env[62923]: DEBUG nova.virt.hardware [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 558.042617] env[62923]: DEBUG nova.virt.hardware [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 558.042817] env[62923]: DEBUG nova.virt.hardware [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints
/opt/stack/nova/nova/virt/hardware.py:388}} [ 558.042934] env[62923]: DEBUG nova.virt.hardware [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 558.043744] env[62923]: DEBUG nova.virt.hardware [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 558.044063] env[62923]: DEBUG nova.virt.hardware [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 558.044262] env[62923]: DEBUG nova.virt.hardware [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 558.044409] env[62923]: DEBUG nova.virt.hardware [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 558.044591] env[62923]: DEBUG nova.virt.hardware [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 558.044814] env[62923]: DEBUG nova.virt.hardware [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 558.045650] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63259c7c-65c6-493c-af08-f1a9a8c9bedc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.054111] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042c5aed-dc60-40fd-a37d-1904eb2aa45d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.069708] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Instance VIF info [] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 558.075516] env[62923]: DEBUG oslo.service.loopingcall [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 558.075606] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 558.075815] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c9a9890-722b-41d6-bbea-2fd59aa575d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.096393] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 558.096393] env[62923]: value = "task-1369847" [ 558.096393] env[62923]: _type = "Task" [ 558.096393] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.104571] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369847, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.155670] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Releasing lock "refresh_cache-6ca62d1b-9533-4b83-8e8a-7f62a34c90a3" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.156116] env[62923]: DEBUG nova.compute.manager [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 558.156315] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 558.157330] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3f1597-2cb3-48e0-ad0a-f58d6d5dc0bb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.164402] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 558.164646] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59c2d596-0f37-429d-92e3-2a7811a31935 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.170539] env[62923]: DEBUG oslo_vmware.api [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Waiting for the task: (returnval){ [ 558.170539] env[62923]: value = "task-1369848" [ 558.170539] env[62923]: _type = "Task" [ 558.170539] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.178272] env[62923]: DEBUG oslo_vmware.api [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369848, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.217077] env[62923]: DEBUG nova.scheduler.client.report [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 558.301310] env[62923]: DEBUG nova.network.neutron [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.353231] env[62923]: ERROR nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port aa99e258-e40f-4e1b-8337-168e3553f6ce, please check neutron logs for more information. 
[ 558.353231] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 558.353231] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.353231] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 558.353231] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 558.353231] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 558.353231] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 558.353231] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 558.353231] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.353231] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 558.353231] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.353231] env[62923]: ERROR nova.compute.manager raise self.value [ 558.353231] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 558.353231] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 558.353231] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.353231] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 558.353948] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 558.353948] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 558.353948] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port aa99e258-e40f-4e1b-8337-168e3553f6ce, please check neutron logs for more information. 
[ 558.353948] env[62923]: ERROR nova.compute.manager [ 558.353948] env[62923]: Traceback (most recent call last): [ 558.353948] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 558.353948] env[62923]: listener.cb(fileno) [ 558.353948] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 558.353948] env[62923]: result = function(*args, **kwargs) [ 558.353948] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 558.353948] env[62923]: return func(*args, **kwargs) [ 558.353948] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 558.353948] env[62923]: raise e [ 558.353948] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.353948] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 558.353948] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 558.353948] env[62923]: created_port_ids = self._update_ports_for_instance( [ 558.353948] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 558.353948] env[62923]: with excutils.save_and_reraise_exception(): [ 558.353948] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.353948] env[62923]: self.force_reraise() [ 558.353948] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.353948] env[62923]: raise self.value [ 558.353948] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 558.353948] env[62923]: updated_port = self._update_port( [ 558.353948] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.353948] env[62923]: _ensure_no_port_binding_failure(port) [ 558.353948] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 558.353948] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 558.355792] env[62923]: nova.exception.PortBindingFailed: Binding failed for port aa99e258-e40f-4e1b-8337-168e3553f6ce, please check neutron logs for more information. [ 558.355792] env[62923]: Removing descriptor: 17 [ 558.355792] env[62923]: ERROR nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port aa99e258-e40f-4e1b-8337-168e3553f6ce, please check neutron logs for more information. 
[ 558.355792] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Traceback (most recent call last): [ 558.355792] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 558.355792] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] yield resources [ 558.355792] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 558.355792] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] self.driver.spawn(context, instance, image_meta, [ 558.355792] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 558.355792] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 558.355792] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 558.355792] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] vm_ref = self.build_virtual_machine(instance, [ 558.356230] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 558.356230] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] vif_infos = vmwarevif.get_vif_info(self._session, [ 558.356230] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 558.356230] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] for vif in network_info: [ 558.356230] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 558.356230] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] return self._sync_wrapper(fn, *args, **kwargs) [ 558.356230] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 558.356230] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] self.wait() [ 558.356230] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 558.356230] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] self[:] = self._gt.wait() [ 558.356230] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 558.356230] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] return self._exit_event.wait() [ 558.356230] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 558.356630] env[62923]: ERROR 
nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] result = hub.switch() [ 558.356630] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 558.356630] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] return self.greenlet.switch() [ 558.356630] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 558.356630] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] result = function(*args, **kwargs) [ 558.356630] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 558.356630] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] return func(*args, **kwargs) [ 558.356630] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 558.356630] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] raise e [ 558.356630] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.356630] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] nwinfo = self.network_api.allocate_for_instance( [ 558.356630] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 558.356630] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] created_port_ids = self._update_ports_for_instance( [ 558.357050] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 558.357050] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] with excutils.save_and_reraise_exception(): [ 558.357050] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.357050] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] self.force_reraise() [ 558.357050] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.357050] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] raise self.value [ 558.357050] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 558.357050] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] updated_port = self._update_port( [ 558.357050] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.357050] 
env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] _ensure_no_port_binding_failure(port) [ 558.357050] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 558.357050] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] raise exception.PortBindingFailed(port_id=port['id']) [ 558.357388] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] nova.exception.PortBindingFailed: Binding failed for port aa99e258-e40f-4e1b-8337-168e3553f6ce, please check neutron logs for more information. [ 558.357388] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] [ 558.357388] env[62923]: INFO nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Terminating instance [ 558.357388] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquiring lock "refresh_cache-66b7e04a-968b-44b0-9bb2-e467810bbf6b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.357388] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquired lock "refresh_cache-66b7e04a-968b-44b0-9bb2-e467810bbf6b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.357388] env[62923]: DEBUG nova.network.neutron [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 558.605704] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "0dba167b-aa56-4463-9749-b74fbc7430d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.606101] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "0dba167b-aa56-4463-9749-b74fbc7430d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.610605] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369847, 'name': CreateVM_Task, 'duration_secs': 0.262396} completed successfully.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.610770] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 558.611361] env[62923]: DEBUG oslo_concurrency.lockutils [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.611361] env[62923]: DEBUG oslo_concurrency.lockutils [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.611669] env[62923]: DEBUG oslo_concurrency.lockutils [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 558.611846] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10a74501-dd7d-4242-a3da-00adc257fc49 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.616577] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Waiting for the task: (returnval){ [ 558.616577] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5287bb41-75eb-b4e1-0323-df96a39ab857" [ 558.616577] env[62923]: _type = "Task" [ 558.616577] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.624930] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5287bb41-75eb-b4e1-0323-df96a39ab857, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.679824] env[62923]: DEBUG oslo_vmware.api [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369848, 'name': PowerOffVM_Task, 'duration_secs': 0.127438} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.680099] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 558.680266] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 558.680506] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfd64b38-711e-4067-be5f-07407d49c9eb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.707197] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 558.707420] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 558.707598] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Deleting the datastore file [datastore1] 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 558.707850] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be48614d-d120-4fc6-a026-f8f8a67d6b8a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.715698] env[62923]: DEBUG oslo_vmware.api [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Waiting for the task: (returnval){ [ 558.715698] env[62923]: value = "task-1369850" [ 558.715698] env[62923]: _type = "Task" [ 558.715698] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.724046] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.353s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.724595] env[62923]: DEBUG nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 558.727426] env[62923]: DEBUG oslo_vmware.api [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369850, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.727751] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.140s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.729767] env[62923]: INFO nova.compute.claims [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 558.807292] env[62923]: INFO nova.compute.manager [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] [instance: f81af398-7382-4433-9a24-07d16fd1223b] Took 1.04 seconds to deallocate network for instance. [ 558.885541] env[62923]: DEBUG nova.network.neutron [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 559.020875] env[62923]: DEBUG nova.network.neutron [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.136447] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5287bb41-75eb-b4e1-0323-df96a39ab857, 'name': SearchDatastore_Task, 'duration_secs': 0.011691} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.136814] env[62923]: DEBUG oslo_concurrency.lockutils [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.137064] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 559.137297] env[62923]: DEBUG oslo_concurrency.lockutils [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.137456] env[62923]: DEBUG oslo_concurrency.lockutils [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.137645] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 559.137903] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cac5918-cc0d-4fab-a33e-140fd4ebec45 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.147845] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 559.148069] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 559.148922] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3347cafe-67ed-44f3-8637-b7c5fbb6c6f6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.156068] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Waiting for the task: (returnval){ [ 559.156068] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d8f6d4-e4da-abee-4ddf-fb3281e0605e" [ 559.156068] env[62923]: _type = "Task" [ 559.156068] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.166218] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d8f6d4-e4da-abee-4ddf-fb3281e0605e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.227660] env[62923]: DEBUG oslo_vmware.api [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Task: {'id': task-1369850, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099714} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.227972] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 559.229129] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 559.229388] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 559.229568] env[62923]: INFO nova.compute.manager [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Took 1.07 seconds to destroy the instance on the hypervisor. [ 559.229803] env[62923]: DEBUG oslo.service.loopingcall [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 559.229983] env[62923]: DEBUG nova.compute.manager [-] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 559.230084] env[62923]: DEBUG nova.network.neutron [-] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 559.235386] env[62923]: DEBUG nova.compute.utils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 559.242569] env[62923]: DEBUG nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 559.242868] env[62923]: DEBUG nova.network.neutron [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 559.249041] env[62923]: DEBUG nova.network.neutron [-] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 559.298054] env[62923]: DEBUG nova.policy [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b3bda9ef0aa45e38d3865e522352f30', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c038677d67841078a3f203567a2c81e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 559.524354] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Releasing lock "refresh_cache-66b7e04a-968b-44b0-9bb2-e467810bbf6b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.527022] env[62923]: DEBUG nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 559.527022] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 559.527022] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52690265-5892-4fef-a278-9088f4e577b0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.538847] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8c1e83-2277-4c70-8c8f-502e0e4a51fe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.561832] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 66b7e04a-968b-44b0-9bb2-e467810bbf6b could not be found. [ 559.561832] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 559.561997] env[62923]: INFO nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 559.562202] env[62923]: DEBUG oslo.service.loopingcall [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 559.562471] env[62923]: DEBUG nova.compute.manager [-] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 559.562681] env[62923]: DEBUG nova.network.neutron [-] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 559.597909] env[62923]: DEBUG nova.network.neutron [-] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 559.669701] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d8f6d4-e4da-abee-4ddf-fb3281e0605e, 'name': SearchDatastore_Task, 'duration_secs': 0.008842} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.671067] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15e8a777-1c44-4d7a-814b-4d7629fc8a40 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.679033] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Waiting for the task: (returnval){ [ 559.679033] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ea8532-6abc-0ecf-427e-458dca684bf1" [ 559.679033] env[62923]: _type = "Task" [ 559.679033] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.688472] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ea8532-6abc-0ecf-427e-458dca684bf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.743894] env[62923]: DEBUG nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 559.750476] env[62923]: DEBUG nova.network.neutron [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Successfully created port: 49f26d9b-6984-4a4b-8022-1333cb10294a {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 559.753597] env[62923]: DEBUG nova.network.neutron [-] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.847751] env[62923]: INFO nova.scheduler.client.report [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Deleted allocations for instance f81af398-7382-4433-9a24-07d16fd1223b [ 559.954641] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Acquiring lock "386ffe8a-a160-4dea-88e6-529219eaf99f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.954946] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Lock "386ffe8a-a160-4dea-88e6-529219eaf99f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.096600] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f37c21a-c4f2-4508-997c-0c8901486895 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.101735] env[62923]: DEBUG nova.network.neutron [-] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.108314] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda01a56-a8aa-4d05-b9b0-bd65411e16b9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.148074] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4401620b-feb0-4eaf-9e2e-ab22e9e8208b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.158979] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0e7689-daaa-4d50-bdaa-3de36dee6b21 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.175902] env[62923]: DEBUG nova.compute.provider_tree [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 560.191651] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ea8532-6abc-0ecf-427e-458dca684bf1, 'name': SearchDatastore_Task, 'duration_secs': 0.010116} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.191651] env[62923]: DEBUG oslo_concurrency.lockutils [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.191651] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] a33da17c-bbb2-4307-b4b3-56cec5cb757e/a33da17c-bbb2-4307-b4b3-56cec5cb757e.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 560.191651] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab44bffb-5629-47dd-9afb-12b6279ecafb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.200882] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Waiting for the task: (returnval){ [ 560.200882] env[62923]: value = "task-1369851" [ 560.200882] env[62923]: _type = "Task" [ 560.200882] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.211930] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369851, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.264477] env[62923]: INFO nova.compute.manager [-] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Took 1.03 seconds to deallocate network for instance. [ 560.360688] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afeabe55-54d9-4233-9797-f4d70ebedab2 tempest-ImagesOneServerNegativeTestJSON-1274835773 tempest-ImagesOneServerNegativeTestJSON-1274835773-project-member] Lock "f81af398-7382-4433-9a24-07d16fd1223b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.070s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.605066] env[62923]: INFO nova.compute.manager [-] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Took 1.04 seconds to deallocate network for instance. 
[ 560.607643] env[62923]: DEBUG nova.compute.claims [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 560.607862] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.678730] env[62923]: DEBUG nova.scheduler.client.report [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 560.710902] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369851, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473706} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.711199] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] a33da17c-bbb2-4307-b4b3-56cec5cb757e/a33da17c-bbb2-4307-b4b3-56cec5cb757e.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 560.711470] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 560.711640] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea318890-341a-45b5-8c47-d5e8721e156a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.717594] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Waiting for the task: (returnval){ [ 560.717594] env[62923]: value = "task-1369852" [ 560.717594] env[62923]: _type = "Task" [ 560.717594] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.726071] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369852, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.761427] env[62923]: DEBUG nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 560.772450] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.782864] env[62923]: DEBUG nova.virt.hardware [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 560.783308] env[62923]: DEBUG nova.virt.hardware [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 560.783308] env[62923]: DEBUG nova.virt.hardware [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 560.783308] env[62923]: DEBUG nova.virt.hardware [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 560.783561] env[62923]: DEBUG nova.virt.hardware [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Image pref 0:0:0 {{(pid=62923) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 560.783773] env[62923]: DEBUG nova.virt.hardware [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 560.784502] env[62923]: DEBUG nova.virt.hardware [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 560.784502] env[62923]: DEBUG nova.virt.hardware [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 560.784502] env[62923]: DEBUG nova.virt.hardware [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 560.784502] env[62923]: DEBUG nova.virt.hardware [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 560.784502] env[62923]: DEBUG nova.virt.hardware [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 560.785396] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd4dc25-3578-4e7f-9790-ba18b62dd2ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.793497] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b8b227-e4f7-4a0c-a2c6-17901b6c339a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.864372] env[62923]: DEBUG nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 560.942933] env[62923]: DEBUG nova.compute.manager [req-3bf6b29f-0e73-4ed0-8e0d-fdc28e48ce20 req-3b48f121-1e64-4c6f-b7da-129b7a4390b3 service nova] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Received event network-changed-aa99e258-e40f-4e1b-8337-168e3553f6ce {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 560.943050] env[62923]: DEBUG nova.compute.manager [req-3bf6b29f-0e73-4ed0-8e0d-fdc28e48ce20 req-3b48f121-1e64-4c6f-b7da-129b7a4390b3 service nova] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Refreshing instance network info cache due to event network-changed-aa99e258-e40f-4e1b-8337-168e3553f6ce. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 560.943274] env[62923]: DEBUG oslo_concurrency.lockutils [req-3bf6b29f-0e73-4ed0-8e0d-fdc28e48ce20 req-3b48f121-1e64-4c6f-b7da-129b7a4390b3 service nova] Acquiring lock "refresh_cache-66b7e04a-968b-44b0-9bb2-e467810bbf6b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.943401] env[62923]: DEBUG oslo_concurrency.lockutils [req-3bf6b29f-0e73-4ed0-8e0d-fdc28e48ce20 req-3b48f121-1e64-4c6f-b7da-129b7a4390b3 service nova] Acquired lock "refresh_cache-66b7e04a-968b-44b0-9bb2-e467810bbf6b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.943561] env[62923]: DEBUG nova.network.neutron [req-3bf6b29f-0e73-4ed0-8e0d-fdc28e48ce20 req-3b48f121-1e64-4c6f-b7da-129b7a4390b3 service nova] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Refreshing network info cache for port aa99e258-e40f-4e1b-8337-168e3553f6ce {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 561.185061] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.456s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.185061] env[62923]: DEBUG nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 561.189917] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.930s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.191568] env[62923]: INFO nova.compute.claims [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 561.232985] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369852, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062406} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.235534] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 561.235859] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e87e0d-1ae0-4ee6-8827-8a4d4283ae82 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.262576] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] a33da17c-bbb2-4307-b4b3-56cec5cb757e/a33da17c-bbb2-4307-b4b3-56cec5cb757e.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 561.263521] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eecbf40d-1170-42a2-ae22-e4dddbaf3d38 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.285146] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Waiting for the task: (returnval){ [ 561.285146] env[62923]: value = "task-1369853" [ 561.285146] env[62923]: _type = "Task" [ 561.285146] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.292518] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369853, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.398567] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.491478] env[62923]: DEBUG nova.network.neutron [req-3bf6b29f-0e73-4ed0-8e0d-fdc28e48ce20 req-3b48f121-1e64-4c6f-b7da-129b7a4390b3 service nova] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 561.555717] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquiring lock "a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.555717] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Lock "a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.672987] env[62923]: DEBUG nova.network.neutron [req-3bf6b29f-0e73-4ed0-8e0d-fdc28e48ce20 req-3b48f121-1e64-4c6f-b7da-129b7a4390b3 service nova] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.691594] env[62923]: DEBUG nova.compute.utils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 561.692904] env[62923]: DEBUG nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 561.696839] env[62923]: DEBUG nova.network.neutron [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 561.802146] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369853, 'name': ReconfigVM_Task, 'duration_secs': 0.26957} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.802146] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Reconfigured VM instance instance-00000002 to attach disk [datastore1] a33da17c-bbb2-4307-b4b3-56cec5cb757e/a33da17c-bbb2-4307-b4b3-56cec5cb757e.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 561.802146] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fae12673-4bc9-4c10-b6c5-1a742d9b315f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.809271] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Waiting for the task: (returnval){ [ 561.809271] env[62923]: value = "task-1369854" [ 561.809271] env[62923]: _type = "Task" [ 561.809271] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.819783] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369854, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.861286] env[62923]: DEBUG nova.policy [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '51e91e333cbd4850ae0b45f2650178c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bfeac5ab1021421a9c0b10d4abe9c8d5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 562.177345] env[62923]: DEBUG oslo_concurrency.lockutils [req-3bf6b29f-0e73-4ed0-8e0d-fdc28e48ce20 req-3b48f121-1e64-4c6f-b7da-129b7a4390b3 service nova] Releasing lock "refresh_cache-66b7e04a-968b-44b0-9bb2-e467810bbf6b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.178038] env[62923]: DEBUG nova.compute.manager [req-3bf6b29f-0e73-4ed0-8e0d-fdc28e48ce20 req-3b48f121-1e64-4c6f-b7da-129b7a4390b3 service nova] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Received event network-vif-deleted-aa99e258-e40f-4e1b-8337-168e3553f6ce {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 562.205019] env[62923]: DEBUG nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 562.320601] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369854, 'name': Rename_Task, 'duration_secs': 0.13266} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.321983] env[62923]: ERROR nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 49f26d9b-6984-4a4b-8022-1333cb10294a, please check neutron logs for more information. [ 562.321983] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 562.321983] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 562.321983] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 562.321983] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 562.321983] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 562.321983] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 562.321983] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 562.321983] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 562.321983] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 562.321983] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 562.321983] env[62923]: ERROR nova.compute.manager raise self.value [ 562.321983] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 562.321983] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 562.321983] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 562.321983] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 562.322573] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 562.322573] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 562.322573] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 49f26d9b-6984-4a4b-8022-1333cb10294a, please check neutron logs for more information. 
[ 562.322573] env[62923]: ERROR nova.compute.manager [ 562.322573] env[62923]: Traceback (most recent call last): [ 562.322573] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 562.322573] env[62923]: listener.cb(fileno) [ 562.322573] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 562.322573] env[62923]: result = function(*args, **kwargs) [ 562.322573] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 562.322573] env[62923]: return func(*args, **kwargs) [ 562.322573] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 562.322573] env[62923]: raise e [ 562.322573] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 562.322573] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 562.322573] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 562.322573] env[62923]: created_port_ids = self._update_ports_for_instance( [ 562.322573] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 562.322573] env[62923]: with excutils.save_and_reraise_exception(): [ 562.322573] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 562.322573] env[62923]: self.force_reraise() [ 562.322573] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 562.322573] env[62923]: raise self.value [ 562.322573] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 562.322573] env[62923]: updated_port = self._update_port( [ 562.322573] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 562.322573] env[62923]: _ensure_no_port_binding_failure(port) [ 562.322573] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 562.322573] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 562.324262] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 49f26d9b-6984-4a4b-8022-1333cb10294a, please check neutron logs for more information. [ 562.324262] env[62923]: Removing descriptor: 21 [ 562.324262] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 562.324262] env[62923]: ERROR nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 49f26d9b-6984-4a4b-8022-1333cb10294a, please check neutron logs for more information. 
[ 562.324262] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Traceback (most recent call last): [ 562.324262] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 562.324262] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] yield resources [ 562.324262] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 562.324262] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] self.driver.spawn(context, instance, image_meta, [ 562.324262] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 562.324262] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] self._vmops.spawn(context, instance, image_meta, injected_files, [ 562.324687] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 562.324687] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] vm_ref = self.build_virtual_machine(instance, [ 562.324687] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 562.324687] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] vif_infos = vmwarevif.get_vif_info(self._session, [ 562.324687] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 562.324687] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] for vif in network_info: [ 562.324687] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 562.324687] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] return self._sync_wrapper(fn, *args, **kwargs) [ 562.324687] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 562.324687] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] self.wait() [ 562.324687] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 562.324687] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] self[:] = self._gt.wait() [ 562.324687] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 562.325139] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] return self._exit_event.wait() [ 562.325139] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 562.325139] env[62923]: ERROR 
nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] result = hub.switch() [ 562.325139] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 562.325139] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] return self.greenlet.switch() [ 562.325139] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 562.325139] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] result = function(*args, **kwargs) [ 562.325139] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 562.325139] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] return func(*args, **kwargs) [ 562.325139] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 562.325139] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] raise e [ 562.325139] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 562.325139] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] nwinfo = self.network_api.allocate_for_instance( [ 562.325658] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 562.325658] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] created_port_ids = self._update_ports_for_instance( [ 562.325658] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 562.325658] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] with excutils.save_and_reraise_exception(): [ 562.325658] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 562.325658] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] self.force_reraise() [ 562.325658] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 562.325658] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] raise self.value [ 562.325658] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 562.325658] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] updated_port = self._update_port( [ 562.325658] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 562.325658] 
env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] _ensure_no_port_binding_failure(port) [ 562.325658] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 562.326090] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] raise exception.PortBindingFailed(port_id=port['id']) [ 562.326090] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] nova.exception.PortBindingFailed: Binding failed for port 49f26d9b-6984-4a4b-8022-1333cb10294a, please check neutron logs for more information. [ 562.326090] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] [ 562.326090] env[62923]: INFO nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Terminating instance [ 562.326090] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-26c926e4-dc0a-4a59-abe3-fe1a4595a954 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.327588] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Acquiring lock "refresh_cache-e08d9f27-d2b3-4532-862f-b68e830e8d17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.329020] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Acquired lock "refresh_cache-e08d9f27-d2b3-4532-862f-b68e830e8d17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.329020] env[62923]: DEBUG nova.network.neutron [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 562.337332] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Waiting for the task: (returnval){ [ 562.337332] env[62923]: value = "task-1369855" [ 562.337332] env[62923]: _type = "Task" [ 562.337332] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.346187] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369855, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.600048] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bddad285-66b7-42d3-9761-4fae37c4ae02 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.607712] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31545d48-42df-478e-8634-22b2a17260f9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.650919] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808ab196-0a26-4f30-9250-15f3660ae79c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.658240] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667edaf4-b845-4cc2-80f8-16d680f58b6b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.673612] env[62923]: DEBUG nova.compute.provider_tree [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 562.675636] env[62923]: DEBUG nova.network.neutron [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Successfully created port: c2187654-721c-41d1-8036-921e8df47aaf {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 562.850411] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369855, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.891672] env[62923]: DEBUG nova.network.neutron [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 563.180298] env[62923]: DEBUG nova.scheduler.client.report [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 563.217226] env[62923]: DEBUG nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 563.245065] env[62923]: DEBUG nova.virt.hardware [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 563.245365] env[62923]: DEBUG nova.virt.hardware [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 563.245610] env[62923]: DEBUG nova.virt.hardware [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 563.245741] env[62923]: DEBUG nova.virt.hardware [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 563.245889] env[62923]: DEBUG nova.virt.hardware [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Image pref 0:0:0
{{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 563.246048] env[62923]: DEBUG nova.virt.hardware [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 563.246258] env[62923]: DEBUG nova.virt.hardware [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 563.246414] env[62923]: DEBUG nova.virt.hardware [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 563.246576] env[62923]: DEBUG nova.virt.hardware [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 563.246730] env[62923]: DEBUG nova.virt.hardware [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 563.246899] env[62923]: DEBUG nova.virt.hardware [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 563.247878] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916b725e-869c-45da-8959-e35c87c94832 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.256560] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b411ff7-1533-45c2-8dc9-062f8f928734 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.352022] env[62923]: DEBUG oslo_vmware.api [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Task: {'id': task-1369855, 'name': PowerOnVM_Task, 'duration_secs': 0.947966} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.352022] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 563.352022] env[62923]: DEBUG nova.compute.manager [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 563.352022] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72acea63-aa43-4b1a-872e-dda82b1fee95 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.420615] env[62923]: DEBUG nova.network.neutron [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.686509] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.687263] env[62923]: DEBUG nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 563.691700] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.238s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.870062] env[62923]: DEBUG oslo_concurrency.lockutils [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.928224] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Releasing lock "refresh_cache-e08d9f27-d2b3-4532-862f-b68e830e8d17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.929123] env[62923]: DEBUG nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 563.930031] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 563.930554] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99f7d8ea-2374-43e4-97ba-423b3c2f5033 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.946159] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387a8ee3-772e-42fd-9896-e49dad5b4e0a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.972234] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e08d9f27-d2b3-4532-862f-b68e830e8d17 could not be found. 
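
The PortBindingFailed traceback earlier in this section bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294): Neutron handed the port back, but its binding had failed, so Nova raises and the records that follow tear the instance down. Below is a minimal, self-contained sketch of that guard. Only the raise itself appears verbatim in the log; the comparison against the port's 'binding:vif_type' attribute is an assumption modelled on upstream Nova.

    # Sketch of the guard behind the PortBindingFailed errors in this log.
    # ASSUMPTION: the check keys off 'binding:vif_type' == 'binding_failed';
    # only the raise at nova/network/neutron.py:294 is shown verbatim above.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check '
                             'neutron logs for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron returned the port, but the bind step on the target host
        # may still have failed; surface that as a hard error so the build
        # is aborted and the instance terminated, as in the records above.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # The state the log shows for port 49f26d9b-6984-4a4b-8022-1333cb10294a:
    try:
        _ensure_no_port_binding_failure(
            {'id': '49f26d9b-6984-4a4b-8022-1333cb10294a',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)
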
[ 563.972844] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 563.972844] env[62923]: INFO nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Took 0.04 seconds to destroy the instance on the hypervisor. [ 563.973048] env[62923]: DEBUG oslo.service.loopingcall [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 563.973198] env[62923]: DEBUG nova.compute.manager [-] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 563.973530] env[62923]: DEBUG nova.network.neutron [-] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 564.018191] env[62923]: DEBUG nova.compute.manager [req-2cfb3b01-5168-47b5-b43e-34fe8caf223f req-dec00c6b-750d-4883-a717-af3cb7ad4f83 service nova] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Received event network-changed-49f26d9b-6984-4a4b-8022-1333cb10294a {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 564.018191] env[62923]: DEBUG nova.compute.manager [req-2cfb3b01-5168-47b5-b43e-34fe8caf223f req-dec00c6b-750d-4883-a717-af3cb7ad4f83 service nova] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Refreshing instance network info cache due to event network-changed-49f26d9b-6984-4a4b-8022-1333cb10294a. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 564.018191] env[62923]: DEBUG oslo_concurrency.lockutils [req-2cfb3b01-5168-47b5-b43e-34fe8caf223f req-dec00c6b-750d-4883-a717-af3cb7ad4f83 service nova] Acquiring lock "refresh_cache-e08d9f27-d2b3-4532-862f-b68e830e8d17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.018191] env[62923]: DEBUG oslo_concurrency.lockutils [req-2cfb3b01-5168-47b5-b43e-34fe8caf223f req-dec00c6b-750d-4883-a717-af3cb7ad4f83 service nova] Acquired lock "refresh_cache-e08d9f27-d2b3-4532-862f-b68e830e8d17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.018191] env[62923]: DEBUG nova.network.neutron [req-2cfb3b01-5168-47b5-b43e-34fe8caf223f req-dec00c6b-750d-4883-a717-af3cb7ad4f83 service nova] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Refreshing network info cache for port 49f26d9b-6984-4a4b-8022-1333cb10294a {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 564.024069] env[62923]: DEBUG nova.network.neutron [-] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Instance cache missing network info.
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 564.197017] env[62923]: DEBUG nova.compute.utils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 564.198458] env[62923]: DEBUG nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 564.198631] env[62923]: DEBUG nova.network.neutron [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 564.356216] env[62923]: DEBUG nova.policy [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37a639b2a5bf4792a4d907cd0e9f9cf6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0722cb91ac164042b59ca07913dfef01', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 564.514118] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d500f463-962b-4b7f-b367-f2940b606d39 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.528945] env[62923]: DEBUG nova.network.neutron [-] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 564.533654] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a754d045-44f9-40c2-8af1-873d759c6ed3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.565151] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070cb808-2ecc-4367-9e11-f2fc269175d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.573557] env[62923]: DEBUG nova.network.neutron [req-2cfb3b01-5168-47b5-b43e-34fe8caf223f req-dec00c6b-750d-4883-a717-af3cb7ad4f83 service nova] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 564.575834] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca38f46-8336-48a2-ad7a-0a5241230517 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.590886] env[62923]: DEBUG nova.compute.provider_tree [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.705615] env[62923]: DEBUG nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 564.854695] env[62923]: DEBUG nova.network.neutron [req-2cfb3b01-5168-47b5-b43e-34fe8caf223f req-dec00c6b-750d-4883-a717-af3cb7ad4f83 service nova] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.035636] env[62923]: INFO nova.compute.manager [-] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Took 1.06 seconds to deallocate network for instance. [ 565.038832] env[62923]: DEBUG nova.compute.claims [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 565.039207] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.094293] env[62923]: DEBUG nova.scheduler.client.report [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 565.360758] env[62923]: DEBUG oslo_concurrency.lockutils [req-2cfb3b01-5168-47b5-b43e-34fe8caf223f req-dec00c6b-750d-4883-a717-af3cb7ad4f83 service nova] Releasing lock "refresh_cache-e08d9f27-d2b3-4532-862f-b68e830e8d17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.361032] env[62923]: DEBUG nova.compute.manager 
[req-2cfb3b01-5168-47b5-b43e-34fe8caf223f req-dec00c6b-750d-4883-a717-af3cb7ad4f83 service nova] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Received event network-vif-deleted-49f26d9b-6984-4a4b-8022-1333cb10294a {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 565.600498] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.909s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.601198] env[62923]: ERROR nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 51b8166c-7348-41f6-809a-c827f13e7dbe, please check neutron logs for more information. [ 565.601198] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Traceback (most recent call last): [ 565.601198] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 565.601198] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] self.driver.spawn(context, instance, image_meta, [ 565.601198] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 565.601198] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] self._vmops.spawn(context, instance, image_meta, injected_files, [ 565.601198] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 565.601198] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] vm_ref = self.build_virtual_machine(instance, [ 565.601198] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 565.601198] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] vif_infos = vmwarevif.get_vif_info(self._session, [ 565.601198] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 565.601688] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] for vif in network_info: [ 565.601688] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 565.601688] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] return self._sync_wrapper(fn, *args, **kwargs) [ 565.601688] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 565.601688] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] self.wait() 
[ 565.601688] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 565.601688] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] self[:] = self._gt.wait() [ 565.601688] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 565.601688] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] return self._exit_event.wait() [ 565.601688] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 565.601688] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] result = hub.switch() [ 565.601688] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 565.601688] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] return self.greenlet.switch() [ 565.602628] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 565.602628] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] result = function(*args, **kwargs) [ 565.602628] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 565.602628] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] return func(*args, **kwargs) [ 565.602628] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 565.602628] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] raise e [ 565.602628] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 565.602628] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] nwinfo = self.network_api.allocate_for_instance( [ 565.602628] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 565.602628] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] created_port_ids = self._update_ports_for_instance( [ 565.602628] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 565.602628] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] with excutils.save_and_reraise_exception(): [ 565.602628] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 565.604175] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] 
self.force_reraise() [ 565.604175] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 565.604175] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] raise self.value [ 565.604175] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 565.604175] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] updated_port = self._update_port( [ 565.604175] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 565.604175] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] _ensure_no_port_binding_failure(port) [ 565.604175] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 565.604175] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] raise exception.PortBindingFailed(port_id=port['id']) [ 565.604175] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] nova.exception.PortBindingFailed: Binding failed for port 51b8166c-7348-41f6-809a-c827f13e7dbe, please check neutron logs for more information. [ 565.604175] env[62923]: ERROR nova.compute.manager [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] [ 565.604485] env[62923]: DEBUG nova.compute.utils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Binding failed for port 51b8166c-7348-41f6-809a-c827f13e7dbe, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 565.604596] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.644s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.609445] env[62923]: DEBUG nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Build of instance e0ce4383-cade-4d85-a8a5-2437b9203d98 was re-scheduled: Binding failed for port 51b8166c-7348-41f6-809a-c827f13e7dbe, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 565.611083] env[62923]: DEBUG nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 565.611083] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquiring lock "refresh_cache-e0ce4383-cade-4d85-a8a5-2437b9203d98" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.611083] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquired lock "refresh_cache-e0ce4383-cade-4d85-a8a5-2437b9203d98" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.611083] env[62923]: DEBUG nova.network.neutron [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 565.723424] env[62923]: DEBUG nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 565.728606] env[62923]: DEBUG nova.network.neutron [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Successfully created port: 42f42e23-879b-49d3-9d2f-4f1e206bbfae {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 565.764188] env[62923]: DEBUG nova.virt.hardware [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:02:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='320239678',id=26,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1996038728',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 565.764188] env[62923]: DEBUG nova.virt.hardware [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 565.764188] env[62923]: DEBUG nova.virt.hardware [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 565.764402] env[62923]: DEBUG nova.virt.hardware [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 565.764402] env[62923]: DEBUG nova.virt.hardware [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 565.764835] env[62923]: DEBUG nova.virt.hardware [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 565.765102] env[62923]: DEBUG nova.virt.hardware [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252
tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 565.765271] env[62923]: DEBUG nova.virt.hardware [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 565.765596] env[62923]: DEBUG nova.virt.hardware [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 565.766130] env[62923]: DEBUG nova.virt.hardware [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 565.766192] env[62923]: DEBUG nova.virt.hardware [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 565.768485] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac716da-9001-409b-b00d-d7b046aad63f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.776244] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d501ea82-0305-4428-9fa4-272db9c5fab9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.912765] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquiring lock "a33da17c-bbb2-4307-b4b3-56cec5cb757e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.913056] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Lock "a33da17c-bbb2-4307-b4b3-56cec5cb757e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.913298] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquiring lock "a33da17c-bbb2-4307-b4b3-56cec5cb757e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.913521] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Lock "a33da17c-bbb2-4307-b4b3-56cec5cb757e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.913676] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Lock "a33da17c-bbb2-4307-b4b3-56cec5cb757e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.916456] env[62923]: INFO nova.compute.manager [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Terminating instance [ 565.918311] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquiring lock "refresh_cache-a33da17c-bbb2-4307-b4b3-56cec5cb757e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.918528] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquired lock "refresh_cache-a33da17c-bbb2-4307-b4b3-56cec5cb757e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.918611] env[62923]: DEBUG nova.network.neutron [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 566.151615] env[62923]: DEBUG nova.network.neutron [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.477017] env[62923]: DEBUG nova.network.neutron [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Instance cache missing network info.
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.500647] env[62923]: DEBUG nova.network.neutron [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.503452] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adf63dd-656a-478b-9cce-c582d9216fe8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.512822] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d96bca-ae8d-473f-816a-39c12a7d19b6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.558567] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5d29cb-d235-4ad1-a28e-e34303ddb6b1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.566816] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474da617-df0c-40f7-9933-890a610e10f5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.584031] env[62923]: DEBUG nova.compute.provider_tree [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.701063] env[62923]: DEBUG nova.network.neutron [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.778775] env[62923]: ERROR nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c2187654-721c-41d1-8036-921e8df47aaf, please check neutron logs for more information. 
[ 566.778775] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 566.778775] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 566.778775] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 566.778775] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 566.778775] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 566.778775] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 566.778775] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 566.778775] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.778775] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 566.778775] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.778775] env[62923]: ERROR nova.compute.manager raise self.value [ 566.778775] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 566.778775] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 566.778775] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.778775] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 566.779292] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 566.779292] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 566.779292] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c2187654-721c-41d1-8036-921e8df47aaf, please check neutron logs for more information. 
[ 566.779292] env[62923]: ERROR nova.compute.manager [ 566.779292] env[62923]: Traceback (most recent call last): [ 566.779292] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 566.779292] env[62923]: listener.cb(fileno) [ 566.779292] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 566.779292] env[62923]: result = function(*args, **kwargs) [ 566.779292] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 566.779292] env[62923]: return func(*args, **kwargs) [ 566.779292] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 566.779292] env[62923]: raise e [ 566.779292] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 566.779292] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 566.779292] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 566.779292] env[62923]: created_port_ids = self._update_ports_for_instance( [ 566.779292] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 566.779292] env[62923]: with excutils.save_and_reraise_exception(): [ 566.779292] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.779292] env[62923]: self.force_reraise() [ 566.779292] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.779292] env[62923]: raise self.value [ 566.779292] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 566.779292] env[62923]: updated_port = self._update_port( [ 566.779292] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.779292] env[62923]: _ensure_no_port_binding_failure(port) [ 566.779292] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 566.779292] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 566.780109] env[62923]: nova.exception.PortBindingFailed: Binding failed for port c2187654-721c-41d1-8036-921e8df47aaf, please check neutron logs for more information. [ 566.780109] env[62923]: Removing descriptor: 17 [ 566.780109] env[62923]: ERROR nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c2187654-721c-41d1-8036-921e8df47aaf, please check neutron logs for more information. 
[ 566.780109] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Traceback (most recent call last): [ 566.780109] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 566.780109] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] yield resources [ 566.780109] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 566.780109] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] self.driver.spawn(context, instance, image_meta, [ 566.780109] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 566.780109] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 566.780109] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 566.780109] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] vm_ref = self.build_virtual_machine(instance, [ 566.780486] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 566.780486] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 566.780486] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 566.780486] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] for vif in network_info: [ 566.780486] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 566.780486] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] return self._sync_wrapper(fn, *args, **kwargs) [ 566.780486] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 566.780486] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] self.wait() [ 566.780486] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 566.780486] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] self[:] = self._gt.wait() [ 566.780486] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 566.780486] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] return self._exit_event.wait() [ 566.780486] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 566.780833] env[62923]: ERROR 
nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] result = hub.switch() [ 566.780833] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 566.780833] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] return self.greenlet.switch() [ 566.780833] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 566.780833] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] result = function(*args, **kwargs) [ 566.780833] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 566.780833] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] return func(*args, **kwargs) [ 566.780833] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 566.780833] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] raise e [ 566.780833] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 566.780833] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] nwinfo = self.network_api.allocate_for_instance( [ 566.780833] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 566.780833] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] created_port_ids = self._update_ports_for_instance( [ 566.781208] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 566.781208] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] with excutils.save_and_reraise_exception(): [ 566.781208] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.781208] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] self.force_reraise() [ 566.781208] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.781208] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] raise self.value [ 566.781208] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 566.781208] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] updated_port = self._update_port( [ 566.781208] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.781208] 
env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] _ensure_no_port_binding_failure(port) [ 566.781208] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 566.781208] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] raise exception.PortBindingFailed(port_id=port['id']) [ 566.781545] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] nova.exception.PortBindingFailed: Binding failed for port c2187654-721c-41d1-8036-921e8df47aaf, please check neutron logs for more information. [ 566.781545] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] [ 566.781545] env[62923]: INFO nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Terminating instance [ 566.785483] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Acquiring lock "refresh_cache-33910d6d-0911-4e2a-82af-1b705cedd3fe" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.785483] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Acquired lock "refresh_cache-33910d6d-0911-4e2a-82af-1b705cedd3fe" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.785706] env[62923]: DEBUG nova.network.neutron [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 567.007844] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Releasing lock "refresh_cache-e0ce4383-cade-4d85-a8a5-2437b9203d98" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.008140] env[62923]: DEBUG nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 567.008368] env[62923]: DEBUG nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 567.008555] env[62923]: DEBUG nova.network.neutron [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 567.038978] env[62923]: DEBUG nova.network.neutron [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.090015] env[62923]: DEBUG nova.scheduler.client.report [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 567.205183] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Releasing lock "refresh_cache-a33da17c-bbb2-4307-b4b3-56cec5cb757e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.205633] env[62923]: DEBUG nova.compute.manager [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 567.205826] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 567.206842] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12010453-ed9a-48cf-a43b-d6d02c8f95a1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.219757] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 567.219757] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bbacf5d-8d17-4285-ab3c-190b6c693e94 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.232344] env[62923]: DEBUG oslo_vmware.api [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 567.232344] env[62923]: value = "task-1369856" [ 567.232344] env[62923]: _type = "Task" [ 567.232344] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.251309] env[62923]: DEBUG oslo_vmware.api [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369856, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.291042] env[62923]: DEBUG nova.compute.manager [req-ef5b30be-ece2-4fd7-a93d-eec448fb3cd1 req-1395d7dc-2c0e-4f88-8964-43cc35e7cafe service nova] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Received event network-changed-c2187654-721c-41d1-8036-921e8df47aaf {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 567.291208] env[62923]: DEBUG nova.compute.manager [req-ef5b30be-ece2-4fd7-a93d-eec448fb3cd1 req-1395d7dc-2c0e-4f88-8964-43cc35e7cafe service nova] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Refreshing instance network info cache due to event network-changed-c2187654-721c-41d1-8036-921e8df47aaf. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 567.291396] env[62923]: DEBUG oslo_concurrency.lockutils [req-ef5b30be-ece2-4fd7-a93d-eec448fb3cd1 req-1395d7dc-2c0e-4f88-8964-43cc35e7cafe service nova] Acquiring lock "refresh_cache-33910d6d-0911-4e2a-82af-1b705cedd3fe" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.346953] env[62923]: DEBUG nova.network.neutron [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.541512] env[62923]: DEBUG nova.network.neutron [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.603935] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.604590] env[62923]: ERROR nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8137e2da-4915-439e-9eec-7b3b901c8237, please check neutron logs for more information. [ 567.604590] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Traceback (most recent call last): [ 567.604590] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 567.604590] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] self.driver.spawn(context, instance, image_meta, [ 567.604590] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 567.604590] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 567.604590] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 567.604590] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] vm_ref = self.build_virtual_machine(instance, [ 567.604590] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 567.604590] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] vif_infos = vmwarevif.get_vif_info(self._session, [ 567.604590] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 567.604897] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] for vif in network_info: [ 567.604897] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 567.604897] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] return self._sync_wrapper(fn, *args, **kwargs) [ 567.604897] env[62923]: ERROR nova.compute.manager [instance: 
3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 567.604897] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] self.wait() [ 567.604897] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 567.604897] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] self[:] = self._gt.wait() [ 567.604897] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 567.604897] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] return self._exit_event.wait() [ 567.604897] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 567.604897] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] result = hub.switch() [ 567.604897] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 567.604897] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] return self.greenlet.switch() [ 567.605571] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 567.605571] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] result = function(*args, **kwargs) [ 567.605571] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 567.605571] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] return func(*args, **kwargs) [ 567.605571] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 567.605571] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] raise e [ 567.605571] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.605571] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] nwinfo = self.network_api.allocate_for_instance( [ 567.605571] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 567.605571] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] created_port_ids = self._update_ports_for_instance( [ 567.605571] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 567.605571] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] with excutils.save_and_reraise_exception(): [ 567.605571] env[62923]: ERROR nova.compute.manager [instance: 
3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.606170] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] self.force_reraise() [ 567.606170] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.606170] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] raise self.value [ 567.606170] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 567.606170] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] updated_port = self._update_port( [ 567.606170] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.606170] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] _ensure_no_port_binding_failure(port) [ 567.606170] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.606170] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] raise exception.PortBindingFailed(port_id=port['id']) [ 567.606170] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] nova.exception.PortBindingFailed: Binding failed for port 8137e2da-4915-439e-9eec-7b3b901c8237, please check neutron logs for more information. [ 567.606170] env[62923]: ERROR nova.compute.manager [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] [ 567.606509] env[62923]: DEBUG nova.compute.utils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Binding failed for port 8137e2da-4915-439e-9eec-7b3b901c8237, please check neutron logs for more information. 
{{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 567.606509] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 14.439s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.606709] env[62923]: DEBUG nova.objects.instance [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62923) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 567.613019] env[62923]: DEBUG nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Build of instance 3cb4806d-dffa-4c41-9730-f29d2aad059a was re-scheduled: Binding failed for port 8137e2da-4915-439e-9eec-7b3b901c8237, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 567.613019] env[62923]: DEBUG nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 567.613019] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Acquiring lock "refresh_cache-3cb4806d-dffa-4c41-9730-f29d2aad059a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.613019] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Acquired lock "refresh_cache-3cb4806d-dffa-4c41-9730-f29d2aad059a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.613252] env[62923]: DEBUG nova.network.neutron [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 567.742724] env[62923]: DEBUG oslo_vmware.api [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369856, 'name': PowerOffVM_Task, 'duration_secs': 0.130105} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.742985] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 567.743159] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 567.743394] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f6ae9e6-a96b-4e57-9fb9-9bab061a2616 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.768827] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 567.769064] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 567.769248] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Deleting the datastore file [datastore1] a33da17c-bbb2-4307-b4b3-56cec5cb757e {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 567.769556] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-529d091a-1329-481f-88bd-d1c9e770ec3d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.777394] env[62923]: DEBUG oslo_vmware.api [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for the task: (returnval){ [ 567.777394] env[62923]: value = "task-1369858" [ 567.777394] env[62923]: _type = "Task" [ 567.777394] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.790444] env[62923]: DEBUG oslo_vmware.api [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369858, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.811871] env[62923]: DEBUG nova.network.neutron [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.939092] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Acquiring lock "5a825d36-7563-4792-8f68-8814ec96cfde" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.939426] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Lock "5a825d36-7563-4792-8f68-8814ec96cfde" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.049670] env[62923]: INFO nova.compute.manager [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: e0ce4383-cade-4d85-a8a5-2437b9203d98] Took 1.04 seconds to deallocate network for instance. [ 568.165295] env[62923]: DEBUG nova.network.neutron [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.294155] env[62923]: DEBUG oslo_vmware.api [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Task: {'id': task-1369858, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111751} completed successfully.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.294756] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 568.294848] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 568.295482] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 568.295482] env[62923]: INFO nova.compute.manager [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Took 1.09 seconds to destroy the instance on the hypervisor. [ 568.295482] env[62923]: DEBUG oslo.service.loopingcall [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.296260] env[62923]: DEBUG nova.compute.manager [-] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 568.296260] env[62923]: DEBUG nova.network.neutron [-] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 568.317530] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Releasing lock "refresh_cache-33910d6d-0911-4e2a-82af-1b705cedd3fe" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.317530] env[62923]: DEBUG nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Start destroying the instance on the hypervisor.
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 568.317530] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 568.317530] env[62923]: DEBUG oslo_concurrency.lockutils [req-ef5b30be-ece2-4fd7-a93d-eec448fb3cd1 req-1395d7dc-2c0e-4f88-8964-43cc35e7cafe service nova] Acquired lock "refresh_cache-33910d6d-0911-4e2a-82af-1b705cedd3fe" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.317530] env[62923]: DEBUG nova.network.neutron [req-ef5b30be-ece2-4fd7-a93d-eec448fb3cd1 req-1395d7dc-2c0e-4f88-8964-43cc35e7cafe service nova] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Refreshing network info cache for port c2187654-721c-41d1-8036-921e8df47aaf {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 568.318398] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3395670-61b1-4f5c-9227-ff8b29afa8b8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.332671] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d9a2d3-2579-4351-b032-bb37c6dbeb48 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.348129] env[62923]: DEBUG nova.network.neutron [-] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.365444] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 33910d6d-0911-4e2a-82af-1b705cedd3fe could not be found. [ 568.365576] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 568.365751] env[62923]: INFO nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Took 0.05 seconds to destroy the instance on the hypervisor. [ 568.365984] env[62923]: DEBUG oslo.service.loopingcall [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.366748] env[62923]: DEBUG nova.compute.manager [-] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 568.366842] env[62923]: DEBUG nova.network.neutron [-] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 568.420853] env[62923]: DEBUG nova.network.neutron [-] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.529702] env[62923]: DEBUG nova.network.neutron [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.620689] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25a6f819-f17c-42a1-8c43-594f97189bc4 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.622019] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.437s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.855280] env[62923]: DEBUG nova.network.neutron [-] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.874573] env[62923]: DEBUG nova.network.neutron [req-ef5b30be-ece2-4fd7-a93d-eec448fb3cd1 req-1395d7dc-2c0e-4f88-8964-43cc35e7cafe service nova] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.927878] env[62923]: DEBUG nova.network.neutron [-] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.032891] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Releasing lock "refresh_cache-3cb4806d-dffa-4c41-9730-f29d2aad059a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.033561] env[62923]: DEBUG nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 569.033561] env[62923]: DEBUG nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 569.033561] env[62923]: DEBUG nova.network.neutron [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 569.088499] env[62923]: DEBUG nova.network.neutron [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 569.108802] env[62923]: INFO nova.scheduler.client.report [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Deleted allocations for instance e0ce4383-cade-4d85-a8a5-2437b9203d98 [ 569.172025] env[62923]: DEBUG nova.network.neutron [req-ef5b30be-ece2-4fd7-a93d-eec448fb3cd1 req-1395d7dc-2c0e-4f88-8964-43cc35e7cafe service nova] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.358227] env[62923]: INFO nova.compute.manager [-] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Took 1.06 seconds to deallocate network for instance. [ 569.431412] env[62923]: INFO nova.compute.manager [-] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Took 1.06 seconds to deallocate network for instance. 
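Annotation: the destroy sequence for a33da17c (PowerOffVM_Task at 567.232, UnregisterVM at 567.743, DeleteDatastoreFile_Task at 567.777) follows oslo.vmware's invoke-then-poll pattern; wait_for_task at api.py:397/434/444 is the poll loop that produces the "progress is 0%" and "completed successfully" records above. A sketch of that pattern; the host, credentials, and vm_ref source are placeholders, not this deployment's values:

    # Sketch of the oslo.vmware task pattern behind the PowerOffVM_Task /
    # DeleteDatastoreFile_Task records above. Connection values are
    # placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',   # placeholders
        api_retry_count=10, task_poll_interval=0.5)

    def power_off(vm_ref):
        # invoke_api issues the SOAP call and returns a Task moref;
        # wait_for_task polls it until it succeeds or raises on error.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)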
[ 569.435639] env[62923]: DEBUG nova.compute.claims [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 569.435639] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.535791] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68967cd5-f105-4b67-a337-680866e07927 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.548390] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030a330b-ad87-4473-89bc-1028b6174e54 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.583225] env[62923]: ERROR nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 42f42e23-879b-49d3-9d2f-4f1e206bbfae, please check neutron logs for more information. [ 569.583225] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 569.583225] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 569.583225] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 569.583225] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 569.583225] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 569.583225] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 569.583225] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 569.583225] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 569.583225] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 569.583225] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 569.583225] env[62923]: ERROR nova.compute.manager raise self.value [ 569.583225] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 569.583225] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 569.583225] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 569.583225] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 569.583772] env[62923]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 569.583772] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 569.583772] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 42f42e23-879b-49d3-9d2f-4f1e206bbfae, please check neutron logs for more information. [ 569.583772] env[62923]: ERROR nova.compute.manager [ 569.583772] env[62923]: Traceback (most recent call last): [ 569.583772] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 569.583772] env[62923]: listener.cb(fileno) [ 569.583772] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 569.583772] env[62923]: result = function(*args, **kwargs) [ 569.583772] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 569.583772] env[62923]: return func(*args, **kwargs) [ 569.583772] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 569.583772] env[62923]: raise e [ 569.583772] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 569.583772] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 569.583772] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 569.583772] env[62923]: created_port_ids = self._update_ports_for_instance( [ 569.583772] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 569.583772] env[62923]: with excutils.save_and_reraise_exception(): [ 569.583772] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 569.583772] env[62923]: self.force_reraise() [ 569.583772] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 569.583772] env[62923]: raise self.value [ 569.583772] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 569.583772] env[62923]: updated_port = self._update_port( [ 569.583772] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 569.583772] env[62923]: _ensure_no_port_binding_failure(port) [ 569.583772] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 569.583772] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 569.584622] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 42f42e23-879b-49d3-9d2f-4f1e206bbfae, please check neutron logs for more information. [ 569.584622] env[62923]: Removing descriptor: 21 [ 569.584622] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d637dc6e-f620-4c48-ba49-938ae4d0d1be {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.589458] env[62923]: ERROR nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 42f42e23-879b-49d3-9d2f-4f1e206bbfae, please check neutron logs for more information. 
[ 569.589458] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Traceback (most recent call last): [ 569.589458] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 569.589458] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] yield resources [ 569.589458] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 569.589458] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] self.driver.spawn(context, instance, image_meta, [ 569.589458] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 569.589458] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 569.589458] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 569.589458] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] vm_ref = self.build_virtual_machine(instance, [ 569.589458] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 569.590193] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] vif_infos = vmwarevif.get_vif_info(self._session, [ 569.590193] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 569.590193] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] for vif in network_info: [ 569.590193] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 569.590193] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] return self._sync_wrapper(fn, *args, **kwargs) [ 569.590193] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 569.590193] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] self.wait() [ 569.590193] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 569.590193] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] self[:] = self._gt.wait() [ 569.590193] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 569.590193] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] return self._exit_event.wait() [ 569.590193] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 569.590193] env[62923]: ERROR 
nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] result = hub.switch() [ 569.590772] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 569.590772] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] return self.greenlet.switch() [ 569.590772] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 569.590772] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] result = function(*args, **kwargs) [ 569.590772] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 569.590772] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] return func(*args, **kwargs) [ 569.590772] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 569.590772] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] raise e [ 569.590772] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 569.590772] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] nwinfo = self.network_api.allocate_for_instance( [ 569.590772] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 569.590772] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] created_port_ids = self._update_ports_for_instance( [ 569.590772] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 569.591379] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] with excutils.save_and_reraise_exception(): [ 569.591379] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 569.591379] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] self.force_reraise() [ 569.591379] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 569.591379] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] raise self.value [ 569.591379] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 569.591379] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] updated_port = self._update_port( [ 569.591379] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 569.591379] 
env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] _ensure_no_port_binding_failure(port) [ 569.591379] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 569.591379] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] raise exception.PortBindingFailed(port_id=port['id']) [ 569.591379] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] nova.exception.PortBindingFailed: Binding failed for port 42f42e23-879b-49d3-9d2f-4f1e206bbfae, please check neutron logs for more information. [ 569.591379] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] [ 569.591814] env[62923]: INFO nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Terminating instance [ 569.595018] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Acquiring lock "refresh_cache-25758cb8-6635-4284-bc94-a95389af3c8a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.595018] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Acquired lock "refresh_cache-25758cb8-6635-4284-bc94-a95389af3c8a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.595018] env[62923]: DEBUG nova.network.neutron [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 569.596346] env[62923]: DEBUG nova.network.neutron [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.598838] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd111dd-a2dc-467a-83a4-f3bc2036a3e4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.617238] env[62923]: DEBUG nova.compute.provider_tree [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.623054] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3b294f42-30c0-40e8-982e-410cea55cd09 tempest-ServersAdminTestJSON-1458064838 
tempest-ServersAdminTestJSON-1458064838-project-member] Lock "e0ce4383-cade-4d85-a8a5-2437b9203d98" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 37.350s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.667238] env[62923]: DEBUG nova.compute.manager [req-ebe78f47-fe65-4e6c-a9bf-ec1e5a3e6cfe req-9c95d75e-3d29-4d5d-8f3f-d2c1e5692ccb service nova] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Received event network-vif-deleted-c2187654-721c-41d1-8036-921e8df47aaf {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 569.667440] env[62923]: DEBUG nova.compute.manager [req-ebe78f47-fe65-4e6c-a9bf-ec1e5a3e6cfe req-9c95d75e-3d29-4d5d-8f3f-d2c1e5692ccb service nova] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Received event network-changed-42f42e23-879b-49d3-9d2f-4f1e206bbfae {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 569.667638] env[62923]: DEBUG nova.compute.manager [req-ebe78f47-fe65-4e6c-a9bf-ec1e5a3e6cfe req-9c95d75e-3d29-4d5d-8f3f-d2c1e5692ccb service nova] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Refreshing instance network info cache due to event network-changed-42f42e23-879b-49d3-9d2f-4f1e206bbfae. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 569.667976] env[62923]: DEBUG oslo_concurrency.lockutils [req-ebe78f47-fe65-4e6c-a9bf-ec1e5a3e6cfe req-9c95d75e-3d29-4d5d-8f3f-d2c1e5692ccb service nova] Acquiring lock "refresh_cache-25758cb8-6635-4284-bc94-a95389af3c8a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.674148] env[62923]: DEBUG oslo_concurrency.lockutils [req-ef5b30be-ece2-4fd7-a93d-eec448fb3cd1 req-1395d7dc-2c0e-4f88-8964-43cc35e7cafe service nova] Releasing lock "refresh_cache-33910d6d-0911-4e2a-82af-1b705cedd3fe" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.869088] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.104447] env[62923]: INFO nova.compute.manager [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] [instance: 3cb4806d-dffa-4c41-9730-f29d2aad059a] Took 1.07 seconds to deallocate network for instance. 
[ 570.120096] env[62923]: DEBUG nova.scheduler.client.report [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 570.130349] env[62923]: DEBUG nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 570.210775] env[62923]: DEBUG nova.network.neutron [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 570.598016] env[62923]: DEBUG nova.network.neutron [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.626824] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.004s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.627043] env[62923]: ERROR nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 63c15049-830e-4a51-bf4e-e3cfee34896a, please check neutron logs for more information. 
[ 570.627043] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Traceback (most recent call last): [ 570.627043] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 570.627043] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] self.driver.spawn(context, instance, image_meta, [ 570.627043] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 570.627043] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 570.627043] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 570.627043] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] vm_ref = self.build_virtual_machine(instance, [ 570.627043] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 570.627043] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] vif_infos = vmwarevif.get_vif_info(self._session, [ 570.627043] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 570.627429] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] for vif in network_info: [ 570.627429] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 570.627429] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] return self._sync_wrapper(fn, *args, **kwargs) [ 570.627429] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 570.627429] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] self.wait() [ 570.627429] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 570.627429] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] self[:] = self._gt.wait() [ 570.627429] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 570.627429] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] return self._exit_event.wait() [ 570.627429] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 570.627429] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] result = hub.switch() [ 570.627429] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
570.627429] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] return self.greenlet.switch() [ 570.627859] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 570.627859] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] result = function(*args, **kwargs) [ 570.627859] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 570.627859] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] return func(*args, **kwargs) [ 570.627859] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 570.627859] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] raise e [ 570.627859] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 570.627859] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] nwinfo = self.network_api.allocate_for_instance( [ 570.627859] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 570.627859] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] created_port_ids = self._update_ports_for_instance( [ 570.627859] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 570.627859] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] with excutils.save_and_reraise_exception(): [ 570.627859] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.628376] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] self.force_reraise() [ 570.628376] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.628376] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] raise self.value [ 570.628376] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 570.628376] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] updated_port = self._update_port( [ 570.628376] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.628376] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] _ensure_no_port_binding_failure(port) [ 570.628376] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 570.628376] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] raise exception.PortBindingFailed(port_id=port['id']) [ 570.628376] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] nova.exception.PortBindingFailed: Binding failed for port 63c15049-830e-4a51-bf4e-e3cfee34896a, please check neutron logs for more information. [ 570.628376] env[62923]: ERROR nova.compute.manager [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] [ 570.629800] env[62923]: DEBUG nova.compute.utils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Binding failed for port 63c15049-830e-4a51-bf4e-e3cfee34896a, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 570.631265] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.095s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.638603] env[62923]: DEBUG nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Build of instance 4fe36f05-d730-4fb1-ab05-0425be619dfb was re-scheduled: Binding failed for port 63c15049-830e-4a51-bf4e-e3cfee34896a, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 570.638603] env[62923]: DEBUG nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 570.638603] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Acquiring lock "refresh_cache-4fe36f05-d730-4fb1-ab05-0425be619dfb" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.638603] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Acquired lock "refresh_cache-4fe36f05-d730-4fb1-ab05-0425be619dfb" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.638836] env[62923]: DEBUG nova.network.neutron [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 570.672354] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.101572] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Releasing lock "refresh_cache-25758cb8-6635-4284-bc94-a95389af3c8a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.102016] env[62923]: DEBUG nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 571.102224] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 571.102615] env[62923]: DEBUG oslo_concurrency.lockutils [req-ebe78f47-fe65-4e6c-a9bf-ec1e5a3e6cfe req-9c95d75e-3d29-4d5d-8f3f-d2c1e5692ccb service nova] Acquired lock "refresh_cache-25758cb8-6635-4284-bc94-a95389af3c8a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.103257] env[62923]: DEBUG nova.network.neutron [req-ebe78f47-fe65-4e6c-a9bf-ec1e5a3e6cfe req-9c95d75e-3d29-4d5d-8f3f-d2c1e5692ccb service nova] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Refreshing network info cache for port 42f42e23-879b-49d3-9d2f-4f1e206bbfae {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 571.103890] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1fe9d9b8-5c0a-42a8-8f53-9233736d3a9c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.125634] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4ca761-6172-4ef5-a34b-362fa3312887 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.155281] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 25758cb8-6635-4284-bc94-a95389af3c8a could not be found. [ 571.155281] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 571.155281] env[62923]: INFO nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 571.155428] env[62923]: DEBUG oslo.service.loopingcall [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 571.156575] env[62923]: INFO nova.scheduler.client.report [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Deleted allocations for instance 3cb4806d-dffa-4c41-9730-f29d2aad059a [ 571.163637] env[62923]: DEBUG nova.compute.manager [-] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 571.163746] env[62923]: DEBUG nova.network.neutron [-] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 571.212713] env[62923]: DEBUG nova.network.neutron [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.216588] env[62923]: DEBUG nova.network.neutron [-] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.400969] env[62923]: DEBUG nova.network.neutron [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.521461] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49632b80-009a-4027-8a24-f725d0db5afc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.532187] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3acde58-82b2-40cb-8be5-6b85e085f790 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.570242] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51288597-709e-46f7-8a08-d8914ef245d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.578445] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed7fc26f-096e-4dc8-b45b-7f154d5ef3ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.593981] env[62923]: DEBUG nova.compute.provider_tree [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.647255] env[62923]: DEBUG nova.network.neutron [req-ebe78f47-fe65-4e6c-a9bf-ec1e5a3e6cfe req-9c95d75e-3d29-4d5d-8f3f-d2c1e5692ccb service nova] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.667916] env[62923]: DEBUG oslo_concurrency.lockutils [None req-420844ca-7e2b-49c4-9d1f-da119b804662 tempest-ServersV294TestFqdnHostnames-131500287 tempest-ServersV294TestFqdnHostnames-131500287-project-member] Lock "3cb4806d-dffa-4c41-9730-f29d2aad059a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 38.279s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.720428] env[62923]: DEBUG nova.network.neutron [-] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.879865] env[62923]: DEBUG nova.network.neutron [req-ebe78f47-fe65-4e6c-a9bf-ec1e5a3e6cfe req-9c95d75e-3d29-4d5d-8f3f-d2c1e5692ccb service nova] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.906441] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Releasing lock "refresh_cache-4fe36f05-d730-4fb1-ab05-0425be619dfb" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.906441] env[62923]: DEBUG nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 571.906590] env[62923]: DEBUG nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 571.906825] env[62923]: DEBUG nova.network.neutron [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 571.935662] env[62923]: DEBUG nova.network.neutron [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 572.097350] env[62923]: DEBUG nova.scheduler.client.report [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 572.173109] env[62923]: DEBUG nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 572.223571] env[62923]: INFO nova.compute.manager [-] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Took 1.06 seconds to deallocate network for instance. [ 572.226141] env[62923]: DEBUG nova.compute.claims [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 572.226341] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.383944] env[62923]: DEBUG oslo_concurrency.lockutils [req-ebe78f47-fe65-4e6c-a9bf-ec1e5a3e6cfe req-9c95d75e-3d29-4d5d-8f3f-d2c1e5692ccb service nova] Releasing lock "refresh_cache-25758cb8-6635-4284-bc94-a95389af3c8a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.439465] env[62923]: DEBUG nova.network.neutron [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.567612] env[62923]: DEBUG nova.compute.manager [req-f44d4e98-b0cf-482f-a24d-ed62a153f32e req-5242f2ce-e256-4f32-bbc9-8acf2b04375c service nova] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Received event network-vif-deleted-42f42e23-879b-49d3-9d2f-4f1e206bbfae {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 572.606770] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.975s 
{{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.607044] env[62923]: ERROR nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2bbe3814-1872-4090-baef-b83249364387, please check neutron logs for more information. [ 572.607044] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Traceback (most recent call last): [ 572.607044] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 572.607044] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] self.driver.spawn(context, instance, image_meta, [ 572.607044] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 572.607044] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 572.607044] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 572.607044] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] vm_ref = self.build_virtual_machine(instance, [ 572.607044] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 572.607044] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] vif_infos = vmwarevif.get_vif_info(self._session, [ 572.607044] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 572.607416] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] for vif in network_info: [ 572.607416] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 572.607416] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] return self._sync_wrapper(fn, *args, **kwargs) [ 572.607416] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 572.607416] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] self.wait() [ 572.607416] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 572.607416] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] self[:] = self._gt.wait() [ 572.607416] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 572.607416] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] return self._exit_event.wait() 
[ 572.607416] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 572.607416] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] result = hub.switch() [ 572.607416] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 572.607416] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] return self.greenlet.switch() [ 572.607793] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 572.607793] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] result = function(*args, **kwargs) [ 572.607793] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 572.607793] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] return func(*args, **kwargs) [ 572.607793] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 572.607793] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] raise e [ 572.607793] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 572.607793] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] nwinfo = self.network_api.allocate_for_instance( [ 572.607793] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 572.607793] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] created_port_ids = self._update_ports_for_instance( [ 572.607793] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 572.607793] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] with excutils.save_and_reraise_exception(): [ 572.607793] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 572.608216] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] self.force_reraise() [ 572.608216] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 572.608216] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] raise self.value [ 572.608216] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 572.608216] env[62923]: ERROR nova.compute.manager [instance: 
04d96512-dc09-42ff-96d0-961f7359318c] updated_port = self._update_port( [ 572.608216] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 572.608216] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] _ensure_no_port_binding_failure(port) [ 572.608216] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 572.608216] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] raise exception.PortBindingFailed(port_id=port['id']) [ 572.608216] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] nova.exception.PortBindingFailed: Binding failed for port 2bbe3814-1872-4090-baef-b83249364387, please check neutron logs for more information. [ 572.608216] env[62923]: ERROR nova.compute.manager [instance: 04d96512-dc09-42ff-96d0-961f7359318c] [ 572.608559] env[62923]: DEBUG nova.compute.utils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Binding failed for port 2bbe3814-1872-4090-baef-b83249364387, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 572.610125] env[62923]: DEBUG nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Build of instance 04d96512-dc09-42ff-96d0-961f7359318c was re-scheduled: Binding failed for port 2bbe3814-1872-4090-baef-b83249364387, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 572.610511] env[62923]: DEBUG nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 572.610705] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquiring lock "refresh_cache-04d96512-dc09-42ff-96d0-961f7359318c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.610855] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Acquired lock "refresh_cache-04d96512-dc09-42ff-96d0-961f7359318c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.611651] env[62923]: DEBUG nova.network.neutron [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 572.613733] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.568s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.615472] env[62923]: INFO nova.compute.claims [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 572.694248] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.846529] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Acquiring lock "2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.846773] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Lock "2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.943130] env[62923]: INFO nova.compute.manager [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] [instance: 4fe36f05-d730-4fb1-ab05-0425be619dfb] Took 1.04 seconds to deallocate network for instance. [ 573.135362] env[62923]: DEBUG nova.network.neutron [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 573.292066] env[62923]: DEBUG nova.network.neutron [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.799363] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Releasing lock "refresh_cache-04d96512-dc09-42ff-96d0-961f7359318c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.799657] env[62923]: DEBUG nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 573.800245] env[62923]: DEBUG nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 573.800245] env[62923]: DEBUG nova.network.neutron [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 573.836893] env[62923]: DEBUG nova.network.neutron [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 573.867669] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Acquiring lock "f106b311-fc2f-4811-b7e0-d680de236b78" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.867892] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Lock "f106b311-fc2f-4811-b7e0-d680de236b78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.990234] env[62923]: INFO nova.scheduler.client.report [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Deleted allocations for instance 4fe36f05-d730-4fb1-ab05-0425be619dfb [ 574.074272] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504c838d-6e88-491c-81a3-8afd1c2db2ad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.081974] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21bcde89-82c7-486c-a94c-5b663cd79ad7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.121269] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006872f7-e87e-4388-9237-475edc954e35 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.133625] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3f7f34-91ff-404b-8e2c-9ca1e83072bb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.151566] env[62923]: DEBUG nova.compute.provider_tree [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 574.339782] env[62923]: DEBUG nova.network.neutron [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.497059] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Acquiring lock "f247b499-0a04-47ae-98b0-cb3f7f088a62" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.497308] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Lock "f247b499-0a04-47ae-98b0-cb3f7f088a62" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.499517] env[62923]: DEBUG oslo_concurrency.lockutils [None req-65e2b4dd-0ab0-4536-acd3-a219e098b086 tempest-ServerDiagnosticsTest-2782258 tempest-ServerDiagnosticsTest-2782258-project-member] Lock "4fe36f05-d730-4fb1-ab05-0425be619dfb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 39.820s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.656202] env[62923]: DEBUG nova.scheduler.client.report [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 574.843056] env[62923]: INFO nova.compute.manager [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] [instance: 04d96512-dc09-42ff-96d0-961f7359318c] Took 1.04 seconds to deallocate network for instance. [ 575.004375] env[62923]: DEBUG nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 575.161653] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.162218] env[62923]: DEBUG nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 575.165491] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.558s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.556325] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.556325] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Acquiring lock "bb28249e-a9d3-4d7d-bd05-128f1110dbca" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.556325] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Lock "bb28249e-a9d3-4d7d-bd05-128f1110dbca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.672171] env[62923]: DEBUG nova.compute.utils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 575.680676] env[62923]: DEBUG nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 575.680676] env[62923]: DEBUG nova.network.neutron [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 575.879714] env[62923]: INFO nova.scheduler.client.report [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Deleted allocations for instance 04d96512-dc09-42ff-96d0-961f7359318c [ 575.914017] env[62923]: DEBUG nova.policy [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6dd4b19774a453c9ce2dd9f46bc5c69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bbba7bb465cf45f7a4d8990f33db11b3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 576.050863] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de1a02f-81c0-4cef-8b46-e7f33d6dc433 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.059372] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c98fa0-87ec-4fd1-a736-a3e7f18b64ee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.100544] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b66a7a-9c27-4a26-aa5a-2059bcafde40 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.108784] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c221d8c0-2fca-4f48-ae5a-81d6c6da10fa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.122860] env[62923]: DEBUG nova.compute.provider_tree [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 576.174987] env[62923]: DEBUG nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 576.391629] env[62923]: DEBUG oslo_concurrency.lockutils [None req-443e6465-0b30-4a44-9082-47679b274cd1 tempest-ServersAdminTestJSON-1458064838 tempest-ServersAdminTestJSON-1458064838-project-member] Lock "04d96512-dc09-42ff-96d0-961f7359318c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.635s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.627182] env[62923]: DEBUG nova.scheduler.client.report [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 576.901646] env[62923]: DEBUG nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 577.117506] env[62923]: DEBUG nova.network.neutron [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Successfully created port: 11d62c03-b355-4dd9-8985-0a6ebcf8d0d0 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 577.133479] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.967s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.133959] env[62923]: ERROR nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port aa99e258-e40f-4e1b-8337-168e3553f6ce, please check neutron logs for more information. 
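Before the PortBindingFailed traceback that follows, note the recurring "Inventory has not changed for provider ..." records above: the scheduler report client compares the inventory it just computed against its cached copy and skips the Placement update when the two match. A minimal sketch of that comparison, using the inventory values from this log (the helper name is hypothetical, not Nova's actual code):

    # Hypothetical stand-in for the check behind the "Inventory has not
    # changed for provider ..." records: diff the desired inventory against
    # the cached copy and only write to Placement when they differ.
    CACHED_INVENTORY = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                    'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0},
    }

    def set_inventory_for_provider(desired, cached=CACHED_INVENTORY):
        """Return True only when an update was actually pushed."""
        if desired == cached:      # nested dict equality: nothing to send
            return False           # -> the "Inventory has not changed" path
        cached.clear()
        cached.update(desired)     # the real client would PUT to Placement
        return True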
[ 577.133959] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Traceback (most recent call last): [ 577.133959] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 577.133959] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] self.driver.spawn(context, instance, image_meta, [ 577.133959] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 577.133959] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 577.133959] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 577.133959] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] vm_ref = self.build_virtual_machine(instance, [ 577.133959] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 577.133959] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] vif_infos = vmwarevif.get_vif_info(self._session, [ 577.133959] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 577.135360] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] for vif in network_info: [ 577.135360] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 577.135360] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] return self._sync_wrapper(fn, *args, **kwargs) [ 577.135360] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 577.135360] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] self.wait() [ 577.135360] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 577.135360] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] self[:] = self._gt.wait() [ 577.135360] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 577.135360] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] return self._exit_event.wait() [ 577.135360] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 577.135360] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] result = hub.switch() [ 577.135360] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
577.135360] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] return self.greenlet.switch() [ 577.135772] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 577.135772] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] result = function(*args, **kwargs) [ 577.135772] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 577.135772] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] return func(*args, **kwargs) [ 577.135772] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 577.135772] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] raise e [ 577.135772] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 577.135772] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] nwinfo = self.network_api.allocate_for_instance( [ 577.135772] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 577.135772] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] created_port_ids = self._update_ports_for_instance( [ 577.135772] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 577.135772] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] with excutils.save_and_reraise_exception(): [ 577.135772] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 577.136207] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] self.force_reraise() [ 577.136207] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 577.136207] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] raise self.value [ 577.136207] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 577.136207] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] updated_port = self._update_port( [ 577.136207] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 577.136207] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] _ensure_no_port_binding_failure(port) [ 577.136207] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 577.136207] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] raise exception.PortBindingFailed(port_id=port['id']) [ 577.136207] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] nova.exception.PortBindingFailed: Binding failed for port aa99e258-e40f-4e1b-8337-168e3553f6ce, please check neutron logs for more information. [ 577.136207] env[62923]: ERROR nova.compute.manager [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] [ 577.136545] env[62923]: DEBUG nova.compute.utils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Binding failed for port aa99e258-e40f-4e1b-8337-168e3553f6ce, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 577.137140] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.365s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.137345] env[62923]: DEBUG nova.objects.instance [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Lazy-loading 'resources' on Instance uuid 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 577.138630] env[62923]: DEBUG nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Build of instance 66b7e04a-968b-44b0-9bb2-e467810bbf6b was re-scheduled: Binding failed for port aa99e258-e40f-4e1b-8337-168e3553f6ce, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 577.141891] env[62923]: DEBUG nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 577.142566] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquiring lock "refresh_cache-66b7e04a-968b-44b0-9bb2-e467810bbf6b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.142566] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquired lock "refresh_cache-66b7e04a-968b-44b0-9bb2-e467810bbf6b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.142566] env[62923]: DEBUG nova.network.neutron [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 577.189302] env[62923]: DEBUG nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 577.221380] env[62923]: DEBUG nova.virt.hardware [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 577.221632] env[62923]: DEBUG nova.virt.hardware [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 577.221788] env[62923]: DEBUG nova.virt.hardware [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 577.221965] env[62923]: DEBUG nova.virt.hardware [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 577.222676] env[62923]: DEBUG nova.virt.hardware [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 577.222676] env[62923]: DEBUG nova.virt.hardware [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 577.222676] env[62923]: DEBUG nova.virt.hardware [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 577.222676] env[62923]: DEBUG nova.virt.hardware [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 577.222853] env[62923]: DEBUG nova.virt.hardware [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 577.223085] env[62923]: DEBUG nova.virt.hardware [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 577.224653] env[62923]: DEBUG nova.virt.hardware [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 577.226346] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc8ebbb-bdd7-4cc1-88da-53dce94faaf7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.236585] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073c3beb-aceb-480b-8984-605baf2a06fb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.438565] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.584954] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Acquiring lock "0c569bd2-7622-4285-9439-209a88f2e84d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.585213] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Lock "0c569bd2-7622-4285-9439-209a88f2e84d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.678940] env[62923]: DEBUG nova.network.neutron [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 577.845221] env[62923]: DEBUG nova.network.neutron [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.995025] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c16db65-eba0-49f0-b84a-606ebb8d906a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.003576] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b124b41-e25e-4c65-8263-5116a926b35c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.035107] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd419bd-239b-4d0f-b2b8-ca12c6238a78 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.042415] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ff24ee-a95f-4c3a-986d-feb2958bab68 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.056297] env[62923]: DEBUG nova.compute.provider_tree [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.352022] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Releasing lock "refresh_cache-66b7e04a-968b-44b0-9bb2-e467810bbf6b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.352022] env[62923]: DEBUG nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 578.352022] env[62923]: DEBUG nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 578.352022] env[62923]: DEBUG nova.network.neutron [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 578.395340] env[62923]: DEBUG nova.network.neutron [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.559684] env[62923]: DEBUG nova.scheduler.client.report [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 578.901324] env[62923]: DEBUG nova.network.neutron [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.065791] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.929s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.072512] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.671s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.072512] env[62923]: INFO nova.compute.claims [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.096437] env[62923]: INFO nova.scheduler.client.report [None 
req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Deleted allocations for instance 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3 [ 579.285024] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.285024] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.404232] env[62923]: INFO nova.compute.manager [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 66b7e04a-968b-44b0-9bb2-e467810bbf6b] Took 1.05 seconds to deallocate network for instance. [ 579.606469] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9b4f9855-bd37-4975-86cb-8ab2da2a0cd9 tempest-ServerDiagnosticsV248Test-493767357 tempest-ServerDiagnosticsV248Test-493767357-project-member] Lock "6ca62d1b-9533-4b83-8e8a-7f62a34c90a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.632s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.881713] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "7ab06c90-5d19-43fa-b91b-7d17f85d3258" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.881713] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "7ab06c90-5d19-43fa-b91b-7d17f85d3258" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.461988] env[62923]: INFO nova.scheduler.client.report [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Deleted allocations for instance 66b7e04a-968b-44b0-9bb2-e467810bbf6b [ 580.472447] env[62923]: ERROR nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 11d62c03-b355-4dd9-8985-0a6ebcf8d0d0, please check neutron logs for more information. 
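The traceback reproduced next shows how that failure propagates. _update_port checks the port object Neutron returned and raises PortBindingFailed; the save_and_reraise_exception frames come from oslo.utils, whose context manager saves the in-flight exception, lets cleanup run, and re-raises it on exit via force_reraise. A minimal sketch of the pattern, assuming the binding check keys off Neutron's binding:vif_type (the helpers below are simplified stand-ins, not the code in nova/network/neutron.py):

    # Simplified stand-in for the re-raise pattern visible in the traceback.
    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

    def _ensure_no_port_binding_failure(port):
        # Assumption: Neutron marks a failed binding with this vif_type.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port['id'])

    def update_ports_for_instance(ports):
        try:
            for port in ports:
                _ensure_no_port_binding_failure(port)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here with the original exception saved; on a
                # clean exit the context manager force-reraises it, which is
                # the __exit__ -> force_reraise pair in the traceback.
                print('cleaning up any ports already created')

    try:
        update_ports_for_instance(
            [{'id': '11d62c03-b355-4dd9-8985-0a6ebcf8d0d0',
              'binding:vif_type': 'binding_failed'}])
    except PortBindingFailed as exc:
        print(exc)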
[ 580.472447] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 580.472447] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 580.472447] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 580.472447] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 580.472447] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 580.472447] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 580.472447] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 580.472447] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 580.472447] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 580.472447] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 580.472447] env[62923]: ERROR nova.compute.manager raise self.value [ 580.472447] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 580.472447] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 580.472447] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 580.472447] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 580.472930] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 580.472930] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 580.472930] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 11d62c03-b355-4dd9-8985-0a6ebcf8d0d0, please check neutron logs for more information. 
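The same exception then shows up a second time as a bare traceback. The allocation was started in the background ("Allocating IP information in the background.") on an eventlet greenthread, so the failure is raised once inside the hub (the dump ending in "Removing descriptor") and again in whichever caller waits on the greenthread's result. A minimal sketch of that hand-off (illustrative names; RuntimeError stands in for PortBindingFailed):

    # One failure, two tracebacks: the greenthread raises in the hub, and
    # wait() re-raises the same exception in the waiting caller.
    import eventlet

    def allocate_network():
        raise RuntimeError('binding failed')   # stand-in for the async raise

    gt = eventlet.spawn(allocate_network)      # background allocation
    try:
        gt.wait()                              # caller synchronizes here
    except RuntimeError as exc:
        print('re-raised in caller:', exc)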
[ 580.472930] env[62923]: ERROR nova.compute.manager [ 580.472930] env[62923]: Traceback (most recent call last): [ 580.472930] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 580.472930] env[62923]: listener.cb(fileno) [ 580.472930] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 580.472930] env[62923]: result = function(*args, **kwargs) [ 580.472930] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 580.472930] env[62923]: return func(*args, **kwargs) [ 580.472930] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 580.472930] env[62923]: raise e [ 580.472930] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 580.472930] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 580.472930] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 580.472930] env[62923]: created_port_ids = self._update_ports_for_instance( [ 580.472930] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 580.472930] env[62923]: with excutils.save_and_reraise_exception(): [ 580.472930] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 580.472930] env[62923]: self.force_reraise() [ 580.472930] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 580.472930] env[62923]: raise self.value [ 580.472930] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 580.472930] env[62923]: updated_port = self._update_port( [ 580.472930] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 580.472930] env[62923]: _ensure_no_port_binding_failure(port) [ 580.472930] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 580.472930] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 580.473846] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 11d62c03-b355-4dd9-8985-0a6ebcf8d0d0, please check neutron logs for more information. [ 580.473846] env[62923]: Removing descriptor: 21 [ 580.476370] env[62923]: ERROR nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 11d62c03-b355-4dd9-8985-0a6ebcf8d0d0, please check neutron logs for more information. 
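The third copy comes from the spawn path. get_vif_info iterates network_info, and on the async wrapper iteration forces a wait() on the allocation greenthread (the __iter__ -> _sync_wrapper -> wait frames below), so driver.spawn inherits the PortBindingFailed raised in the background. A sketch of that lazy-wrapper shape (class name hypothetical; simplified from the pattern in nova.network.model, not the real implementation):

    # The list stays empty until first use; first use waits on (and can
    # re-raise from) the background allocation.
    import eventlet

    class NetworkInfoAsync(list):
        def __init__(self, async_fn, *args):
            super().__init__()
            self._gt = eventlet.spawn(async_fn, *args)

        def wait(self):
            self[:] = self._gt.wait()   # re-raises the greenthread's error
            return self

        def __iter__(self):
            self.wait()                 # e.g. get_vif_info's for-loop
            return super().__iter__()

    infos = NetworkInfoAsync(lambda: [{'id': 'vif-1'}])
    for vif in infos:                   # triggers wait(); would re-raise here
        print(vif['id'])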
[ 580.476370] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Traceback (most recent call last): [ 580.476370] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 580.476370] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] yield resources [ 580.476370] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 580.476370] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] self.driver.spawn(context, instance, image_meta, [ 580.476370] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 580.476370] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] self._vmops.spawn(context, instance, image_meta, injected_files, [ 580.476370] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 580.476370] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] vm_ref = self.build_virtual_machine(instance, [ 580.476370] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 580.476721] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] vif_infos = vmwarevif.get_vif_info(self._session, [ 580.476721] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 580.476721] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] for vif in network_info: [ 580.476721] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 580.476721] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] return self._sync_wrapper(fn, *args, **kwargs) [ 580.476721] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 580.476721] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] self.wait() [ 580.476721] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 580.476721] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] self[:] = self._gt.wait() [ 580.476721] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 580.476721] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] return self._exit_event.wait() [ 580.476721] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 580.476721] env[62923]: ERROR 
nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] result = hub.switch() [ 580.477107] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 580.477107] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] return self.greenlet.switch() [ 580.477107] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 580.477107] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] result = function(*args, **kwargs) [ 580.477107] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 580.477107] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] return func(*args, **kwargs) [ 580.477107] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 580.477107] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] raise e [ 580.477107] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 580.477107] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] nwinfo = self.network_api.allocate_for_instance( [ 580.477107] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 580.477107] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] created_port_ids = self._update_ports_for_instance( [ 580.477107] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 580.477573] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] with excutils.save_and_reraise_exception(): [ 580.477573] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 580.477573] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] self.force_reraise() [ 580.477573] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 580.477573] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] raise self.value [ 580.477573] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 580.477573] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] updated_port = self._update_port( [ 580.477573] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 580.477573] 
env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] _ensure_no_port_binding_failure(port) [ 580.477573] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 580.477573] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] raise exception.PortBindingFailed(port_id=port['id']) [ 580.477573] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] nova.exception.PortBindingFailed: Binding failed for port 11d62c03-b355-4dd9-8985-0a6ebcf8d0d0, please check neutron logs for more information. [ 580.477573] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] [ 580.477941] env[62923]: INFO nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Terminating instance [ 580.482759] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Acquiring lock "refresh_cache-6f5cc707-70e8-48fb-8d9d-904f3e0130af" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.482759] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Acquired lock "refresh_cache-6f5cc707-70e8-48fb-8d9d-904f3e0130af" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.482759] env[62923]: DEBUG nova.network.neutron [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 580.552010] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68db27c-4be9-44dd-85c8-486e48a46285 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.560887] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6568d3-4187-4dcd-8fdf-9ae17485dc13 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.594084] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16230df9-cbbc-407d-bf71-6b87e48f3621 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.601463] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a30f9f-c517-4909-b72d-e53fe7021be9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.614932] env[62923]: DEBUG nova.compute.provider_tree [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] 
Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.681947] env[62923]: DEBUG nova.compute.manager [req-52f8ea92-be67-4d92-b9ff-2ae25d5d85ed req-63c1c741-332a-43dc-ba4b-f62de026b4f9 service nova] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Received event network-changed-11d62c03-b355-4dd9-8985-0a6ebcf8d0d0 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 580.682185] env[62923]: DEBUG nova.compute.manager [req-52f8ea92-be67-4d92-b9ff-2ae25d5d85ed req-63c1c741-332a-43dc-ba4b-f62de026b4f9 service nova] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Refreshing instance network info cache due to event network-changed-11d62c03-b355-4dd9-8985-0a6ebcf8d0d0. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 580.682408] env[62923]: DEBUG oslo_concurrency.lockutils [req-52f8ea92-be67-4d92-b9ff-2ae25d5d85ed req-63c1c741-332a-43dc-ba4b-f62de026b4f9 service nova] Acquiring lock "refresh_cache-6f5cc707-70e8-48fb-8d9d-904f3e0130af" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.984939] env[62923]: DEBUG oslo_concurrency.lockutils [None req-457c8f5a-445c-45fa-8635-6427661f088e tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Lock "66b7e04a-968b-44b0-9bb2-e467810bbf6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.964s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.016184] env[62923]: DEBUG nova.network.neutron [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 581.121112] env[62923]: DEBUG nova.scheduler.client.report [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 581.173342] env[62923]: DEBUG nova.network.neutron [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.489619] env[62923]: DEBUG nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 581.627417] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.557s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.627930] env[62923]: DEBUG nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 581.631244] env[62923]: DEBUG oslo_concurrency.lockutils [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.761s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.631487] env[62923]: DEBUG nova.objects.instance [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62923) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 581.676809] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Releasing lock "refresh_cache-6f5cc707-70e8-48fb-8d9d-904f3e0130af" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.676994] env[62923]: DEBUG nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 581.677230] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 581.677936] env[62923]: DEBUG oslo_concurrency.lockutils [req-52f8ea92-be67-4d92-b9ff-2ae25d5d85ed req-63c1c741-332a-43dc-ba4b-f62de026b4f9 service nova] Acquired lock "refresh_cache-6f5cc707-70e8-48fb-8d9d-904f3e0130af" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.679477] env[62923]: DEBUG nova.network.neutron [req-52f8ea92-be67-4d92-b9ff-2ae25d5d85ed req-63c1c741-332a-43dc-ba4b-f62de026b4f9 service nova] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Refreshing network info cache for port 11d62c03-b355-4dd9-8985-0a6ebcf8d0d0 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 581.679477] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-42119c2a-6052-4d83-88e2-1af6d0aaa557 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.693115] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05f495e-6f2a-4448-a567-ad77da8a0b27 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.720022] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6f5cc707-70e8-48fb-8d9d-904f3e0130af could not be found. [ 581.720022] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 581.720022] env[62923]: INFO nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Took 0.04 seconds to destroy the instance on the hypervisor. [ 581.720022] env[62923]: DEBUG oslo.service.loopingcall [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 581.720022] env[62923]: DEBUG nova.compute.manager [-] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 581.720022] env[62923]: DEBUG nova.network.neutron [-] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 581.795350] env[62923]: DEBUG nova.network.neutron [-] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 582.030021] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.145123] env[62923]: DEBUG nova.compute.utils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 582.145123] env[62923]: DEBUG nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 582.145123] env[62923]: DEBUG nova.network.neutron [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 582.252945] env[62923]: DEBUG nova.network.neutron [req-52f8ea92-be67-4d92-b9ff-2ae25d5d85ed req-63c1c741-332a-43dc-ba4b-f62de026b4f9 service nova] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 582.297180] env[62923]: DEBUG nova.network.neutron [-] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.324139] env[62923]: DEBUG nova.policy [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e1b761abfd44661a6da62ba35ec442f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2948b6c7e6f04cf98b36777c2fc94fc1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 582.600883] env[62923]: DEBUG nova.network.neutron [req-52f8ea92-be67-4d92-b9ff-2ae25d5d85ed req-63c1c741-332a-43dc-ba4b-f62de026b4f9 service nova] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.647615] env[62923]: DEBUG oslo_concurrency.lockutils [None req-938772ee-b4f1-4c99-9a58-1ef068837006 tempest-ServersAdmin275Test-973179173 tempest-ServersAdmin275Test-973179173-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.648615] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.609s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.656997] env[62923]: DEBUG nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 582.723808] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquiring lock "7559e4b7-1cfe-438e-8a14-a964c1a76d52" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.723808] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Lock "7559e4b7-1cfe-438e-8a14-a964c1a76d52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.800662] env[62923]: INFO nova.compute.manager [-] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Took 1.08 seconds to deallocate network for instance. [ 582.804210] env[62923]: DEBUG nova.compute.claims [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 582.804210] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.104505] env[62923]: DEBUG oslo_concurrency.lockutils [req-52f8ea92-be67-4d92-b9ff-2ae25d5d85ed req-63c1c741-332a-43dc-ba4b-f62de026b4f9 service nova] Releasing lock "refresh_cache-6f5cc707-70e8-48fb-8d9d-904f3e0130af" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.516083] env[62923]: DEBUG nova.network.neutron [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Successfully created port: 572b379b-a2f8-4652-b17d-3e90afacb781 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 583.554932] env[62923]: DEBUG nova.compute.manager [req-ff2a5bc5-962b-4daf-8c31-4fb027030a9b req-3f4daa64-245b-4d56-aebb-505f507f3438 service nova] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Received event network-vif-deleted-11d62c03-b355-4dd9-8985-0a6ebcf8d0d0 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 583.598888] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f533e3-11dd-45a0-9101-499102f7b77b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.610431] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ea7275-774d-4148-9ffd-4760b5cf5876 {{(pid=62923) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.642662] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b4e756-c29e-44fb-b192-fe939d700768 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.649685] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6a8213-f06a-4ac0-8aba-982d5602fbe3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.666941] env[62923]: DEBUG nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 583.669142] env[62923]: DEBUG nova.compute.provider_tree [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.696738] env[62923]: DEBUG nova.virt.hardware [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 583.697087] env[62923]: DEBUG nova.virt.hardware [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 583.697195] env[62923]: DEBUG nova.virt.hardware [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 583.697317] env[62923]: DEBUG nova.virt.hardware [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 583.697460] env[62923]: DEBUG nova.virt.hardware [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 583.697603] env[62923]: DEBUG nova.virt.hardware [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 583.697804] env[62923]: DEBUG nova.virt.hardware [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 583.697988] env[62923]: DEBUG nova.virt.hardware [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 583.698222] env[62923]: DEBUG nova.virt.hardware [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 583.698402] env[62923]: DEBUG nova.virt.hardware [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 583.698574] env[62923]: DEBUG nova.virt.hardware [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 583.699752] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f621431-8f0c-4c91-b01e-f6eea2c3c9ae {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.707588] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58c6e2e-f60f-4888-aeea-bb54fc568d24 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.172810] env[62923]: DEBUG nova.scheduler.client.report [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 584.679955] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 
tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.031s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.680652] env[62923]: ERROR nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 49f26d9b-6984-4a4b-8022-1333cb10294a, please check neutron logs for more information. [ 584.680652] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Traceback (most recent call last): [ 584.680652] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 584.680652] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] self.driver.spawn(context, instance, image_meta, [ 584.680652] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 584.680652] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] self._vmops.spawn(context, instance, image_meta, injected_files, [ 584.680652] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 584.680652] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] vm_ref = self.build_virtual_machine(instance, [ 584.680652] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 584.680652] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] vif_infos = vmwarevif.get_vif_info(self._session, [ 584.680652] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 584.681071] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] for vif in network_info: [ 584.681071] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 584.681071] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] return self._sync_wrapper(fn, *args, **kwargs) [ 584.681071] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 584.681071] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] self.wait() [ 584.681071] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 584.681071] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] self[:] = self._gt.wait() [ 584.681071] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 584.681071] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] return self._exit_event.wait() [ 584.681071] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 584.681071] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] result = hub.switch() [ 584.681071] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 584.681071] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] return self.greenlet.switch() [ 584.683254] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.683254] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] result = function(*args, **kwargs) [ 584.683254] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 584.683254] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] return func(*args, **kwargs) [ 584.683254] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 584.683254] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] raise e [ 584.683254] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.683254] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] nwinfo = self.network_api.allocate_for_instance( [ 584.683254] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 584.683254] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] created_port_ids = self._update_ports_for_instance( [ 584.683254] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 584.683254] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] with excutils.save_and_reraise_exception(): [ 584.683254] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.683685] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] self.force_reraise() [ 584.683685] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.683685] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] raise self.value [ 584.683685] env[62923]: ERROR nova.compute.manager [instance: 
e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 584.683685] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] updated_port = self._update_port( [ 584.683685] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.683685] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] _ensure_no_port_binding_failure(port) [ 584.683685] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.683685] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] raise exception.PortBindingFailed(port_id=port['id']) [ 584.683685] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] nova.exception.PortBindingFailed: Binding failed for port 49f26d9b-6984-4a4b-8022-1333cb10294a, please check neutron logs for more information. [ 584.683685] env[62923]: ERROR nova.compute.manager [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] [ 584.684092] env[62923]: DEBUG nova.compute.utils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Binding failed for port 49f26d9b-6984-4a4b-8022-1333cb10294a, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 584.684092] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.247s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.692024] env[62923]: DEBUG nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Build of instance e08d9f27-d2b3-4532-862f-b68e830e8d17 was re-scheduled: Binding failed for port 49f26d9b-6984-4a4b-8022-1333cb10294a, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 584.692024] env[62923]: DEBUG nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 584.692024] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Acquiring lock "refresh_cache-e08d9f27-d2b3-4532-862f-b68e830e8d17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.692024] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Acquired lock "refresh_cache-e08d9f27-d2b3-4532-862f-b68e830e8d17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.692607] env[62923]: DEBUG nova.network.neutron [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 585.265346] env[62923]: DEBUG nova.network.neutron [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 585.637938] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a4da25-49e4-4b21-b7c1-05b39bceeb7b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.648183] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9720e3da-889b-4a76-a8a4-f0a899c802f7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.652532] env[62923]: DEBUG nova.network.neutron [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.685927] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819ef587-cc93-4139-a365-91094a7bcec9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.695203] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ec7a20-22ad-4163-9ea2-cc66648bb6c2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.713453] env[62923]: DEBUG nova.compute.provider_tree [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.839613] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquiring lock "9654e9d5-a809-4875-99bb-fd99d7a7fbd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.839885] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Lock "9654e9d5-a809-4875-99bb-fd99d7a7fbd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.160889] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Releasing lock "refresh_cache-e08d9f27-d2b3-4532-862f-b68e830e8d17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.160889] env[62923]: DEBUG nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Virt driver
does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 586.160889] env[62923]: DEBUG nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 586.160889] env[62923]: DEBUG nova.network.neutron [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 586.196892] env[62923]: DEBUG nova.network.neutron [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.216431] env[62923]: DEBUG nova.scheduler.client.report [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 586.701899] env[62923]: DEBUG nova.network.neutron [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.723169] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.040s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.723839] env[62923]: ERROR nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c2187654-721c-41d1-8036-921e8df47aaf, please check neutron logs for more information. 
[ 586.723839] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Traceback (most recent call last): [ 586.723839] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 586.723839] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] self.driver.spawn(context, instance, image_meta, [ 586.723839] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 586.723839] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 586.723839] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 586.723839] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] vm_ref = self.build_virtual_machine(instance, [ 586.723839] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 586.723839] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 586.723839] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 586.724280] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] for vif in network_info: [ 586.724280] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 586.724280] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] return self._sync_wrapper(fn, *args, **kwargs) [ 586.724280] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 586.724280] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] self.wait() [ 586.724280] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 586.724280] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] self[:] = self._gt.wait() [ 586.724280] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 586.724280] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] return self._exit_event.wait() [ 586.724280] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 586.724280] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] result = hub.switch() [ 586.724280] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
586.724280] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] return self.greenlet.switch() [ 586.724705] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 586.724705] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] result = function(*args, **kwargs) [ 586.724705] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 586.724705] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] return func(*args, **kwargs) [ 586.724705] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 586.724705] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] raise e [ 586.724705] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 586.724705] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] nwinfo = self.network_api.allocate_for_instance( [ 586.724705] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 586.724705] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] created_port_ids = self._update_ports_for_instance( [ 586.724705] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 586.724705] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] with excutils.save_and_reraise_exception(): [ 586.724705] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 586.725169] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] self.force_reraise() [ 586.725169] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 586.725169] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] raise self.value [ 586.725169] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 586.725169] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] updated_port = self._update_port( [ 586.725169] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 586.725169] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] _ensure_no_port_binding_failure(port) [ 586.725169] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 586.725169] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] raise exception.PortBindingFailed(port_id=port['id']) [ 586.725169] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] nova.exception.PortBindingFailed: Binding failed for port c2187654-721c-41d1-8036-921e8df47aaf, please check neutron logs for more information. [ 586.725169] env[62923]: ERROR nova.compute.manager [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] [ 586.725695] env[62923]: DEBUG nova.compute.utils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Binding failed for port c2187654-721c-41d1-8036-921e8df47aaf, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 586.726984] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.858s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.727092] env[62923]: DEBUG nova.objects.instance [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Lazy-loading 'resources' on Instance uuid a33da17c-bbb2-4307-b4b3-56cec5cb757e {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 586.728266] env[62923]: DEBUG nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Build of instance 33910d6d-0911-4e2a-82af-1b705cedd3fe was re-scheduled: Binding failed for port c2187654-721c-41d1-8036-921e8df47aaf, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 586.730214] env[62923]: DEBUG nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 586.730214] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Acquiring lock "refresh_cache-33910d6d-0911-4e2a-82af-1b705cedd3fe" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.730214] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Acquired lock "refresh_cache-33910d6d-0911-4e2a-82af-1b705cedd3fe" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.730214] env[62923]: DEBUG nova.network.neutron [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 587.208671] env[62923]: INFO nova.compute.manager [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] [instance: e08d9f27-d2b3-4532-862f-b68e830e8d17] Took 1.05 seconds to deallocate network for instance. [ 587.294925] env[62923]: DEBUG nova.network.neutron [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 587.358696] env[62923]: ERROR nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 572b379b-a2f8-4652-b17d-3e90afacb781, please check neutron logs for more information. 
[ 587.358696] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 587.358696] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.358696] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 587.358696] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 587.358696] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 587.358696] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 587.358696] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 587.358696] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.358696] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 587.358696] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.358696] env[62923]: ERROR nova.compute.manager raise self.value [ 587.358696] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 587.358696] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 587.358696] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.358696] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 587.359287] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 587.359287] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 587.359287] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 572b379b-a2f8-4652-b17d-3e90afacb781, please check neutron logs for more information. 
[ 587.359287] env[62923]: ERROR nova.compute.manager [ 587.359287] env[62923]: Traceback (most recent call last): [ 587.359287] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 587.359287] env[62923]: listener.cb(fileno) [ 587.359287] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 587.359287] env[62923]: result = function(*args, **kwargs) [ 587.359287] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 587.359287] env[62923]: return func(*args, **kwargs) [ 587.359287] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 587.359287] env[62923]: raise e [ 587.359287] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.359287] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 587.359287] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 587.359287] env[62923]: created_port_ids = self._update_ports_for_instance( [ 587.359287] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 587.359287] env[62923]: with excutils.save_and_reraise_exception(): [ 587.359287] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.359287] env[62923]: self.force_reraise() [ 587.359287] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.359287] env[62923]: raise self.value [ 587.359287] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 587.359287] env[62923]: updated_port = self._update_port( [ 587.359287] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.359287] env[62923]: _ensure_no_port_binding_failure(port) [ 587.359287] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 587.359287] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 587.360381] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 572b379b-a2f8-4652-b17d-3e90afacb781, please check neutron logs for more information. [ 587.360381] env[62923]: Removing descriptor: 21 [ 587.360381] env[62923]: ERROR nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 572b379b-a2f8-4652-b17d-3e90afacb781, please check neutron logs for more information. 
[ 587.360381] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Traceback (most recent call last): [ 587.360381] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 587.360381] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] yield resources [ 587.360381] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 587.360381] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] self.driver.spawn(context, instance, image_meta, [ 587.360381] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 587.360381] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] self._vmops.spawn(context, instance, image_meta, injected_files, [ 587.360381] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 587.360381] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] vm_ref = self.build_virtual_machine(instance, [ 587.360812] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 587.360812] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] vif_infos = vmwarevif.get_vif_info(self._session, [ 587.360812] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 587.360812] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] for vif in network_info: [ 587.360812] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 587.360812] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] return self._sync_wrapper(fn, *args, **kwargs) [ 587.360812] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 587.360812] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] self.wait() [ 587.360812] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 587.360812] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] self[:] = self._gt.wait() [ 587.360812] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 587.360812] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] return self._exit_event.wait() [ 587.360812] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 587.361261] env[62923]: ERROR 
nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] result = hub.switch() [ 587.361261] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 587.361261] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] return self.greenlet.switch() [ 587.361261] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 587.361261] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] result = function(*args, **kwargs) [ 587.361261] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 587.361261] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] return func(*args, **kwargs) [ 587.361261] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 587.361261] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] raise e [ 587.361261] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.361261] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] nwinfo = self.network_api.allocate_for_instance( [ 587.361261] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 587.361261] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] created_port_ids = self._update_ports_for_instance( [ 587.361684] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 587.361684] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] with excutils.save_and_reraise_exception(): [ 587.361684] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.361684] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] self.force_reraise() [ 587.361684] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.361684] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] raise self.value [ 587.361684] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 587.361684] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] updated_port = self._update_port( [ 587.361684] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.361684] 
env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] _ensure_no_port_binding_failure(port) [ 587.361684] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 587.361684] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] raise exception.PortBindingFailed(port_id=port['id']) [ 587.362067] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] nova.exception.PortBindingFailed: Binding failed for port 572b379b-a2f8-4652-b17d-3e90afacb781, please check neutron logs for more information. [ 587.362067] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] [ 587.362067] env[62923]: INFO nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Terminating instance [ 587.363319] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "refresh_cache-147165a4-9071-4516-9498-fa4c706a5e37" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.363521] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "refresh_cache-147165a4-9071-4516-9498-fa4c706a5e37" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.363776] env[62923]: DEBUG nova.network.neutron [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 587.507518] env[62923]: DEBUG nova.network.neutron [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.618741] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9317e795-c0af-4634-9d5a-3a9a594d9aa5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.627934] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355fb7b6-0e58-4855-94e3-b181815f1341 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.666711] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1cc016-bbdb-44d9-8269-cd11ed021ad7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.676115] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f55493-2b32-45ff-83cf-0bff29698477 
{{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.691372] env[62923]: DEBUG nova.compute.provider_tree [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.771331] env[62923]: DEBUG nova.compute.manager [req-6a6fa67a-f50a-44c1-a913-392a4925d57c req-110fc9a4-7116-4fd0-aa92-35199fe0ca0c service nova] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Received event network-changed-572b379b-a2f8-4652-b17d-3e90afacb781 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 587.771331] env[62923]: DEBUG nova.compute.manager [req-6a6fa67a-f50a-44c1-a913-392a4925d57c req-110fc9a4-7116-4fd0-aa92-35199fe0ca0c service nova] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Refreshing instance network info cache due to event network-changed-572b379b-a2f8-4652-b17d-3e90afacb781. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 587.771331] env[62923]: DEBUG oslo_concurrency.lockutils [req-6a6fa67a-f50a-44c1-a913-392a4925d57c req-110fc9a4-7116-4fd0-aa92-35199fe0ca0c service nova] Acquiring lock "refresh_cache-147165a4-9071-4516-9498-fa4c706a5e37" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.898931] env[62923]: DEBUG nova.network.neutron [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 588.014179] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Releasing lock "refresh_cache-33910d6d-0911-4e2a-82af-1b705cedd3fe" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.014179] env[62923]: DEBUG nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 588.014179] env[62923]: DEBUG nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 588.014179] env[62923]: DEBUG nova.network.neutron [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 588.043326] env[62923]: DEBUG nova.network.neutron [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 588.084934] env[62923]: DEBUG nova.network.neutron [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.197421] env[62923]: DEBUG nova.scheduler.client.report [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 588.211904] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquiring lock "91043784-2e4a-4fa4-87de-1c45971e64c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.212147] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Lock "91043784-2e4a-4fa4-87de-1c45971e64c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.255394] env[62923]: INFO nova.scheduler.client.report [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Deleted allocations for instance 
e08d9f27-d2b3-4532-862f-b68e830e8d17 [ 588.306259] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Acquiring lock "13e71116-cb20-4fc5-8ceb-3a6098bae438" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.307145] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Lock "13e71116-cb20-4fc5-8ceb-3a6098bae438" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.546940] env[62923]: DEBUG nova.network.neutron [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.591666] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "refresh_cache-147165a4-9071-4516-9498-fa4c706a5e37" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.591937] env[62923]: DEBUG nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 588.592144] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 588.592513] env[62923]: DEBUG oslo_concurrency.lockutils [req-6a6fa67a-f50a-44c1-a913-392a4925d57c req-110fc9a4-7116-4fd0-aa92-35199fe0ca0c service nova] Acquired lock "refresh_cache-147165a4-9071-4516-9498-fa4c706a5e37" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.593241] env[62923]: DEBUG nova.network.neutron [req-6a6fa67a-f50a-44c1-a913-392a4925d57c req-110fc9a4-7116-4fd0-aa92-35199fe0ca0c service nova] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Refreshing network info cache for port 572b379b-a2f8-4652-b17d-3e90afacb781 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 588.594346] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eb1360a9-831d-4d18-a150-0459dae3deaa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.604957] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0041b401-5200-4fc4-a454-e94f7f8f799a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.629187] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 147165a4-9071-4516-9498-fa4c706a5e37 could not be found. [ 588.629187] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 588.629187] env[62923]: INFO nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Took 0.04 seconds to destroy the instance on the hypervisor. [ 588.629187] env[62923]: DEBUG oslo.service.loopingcall [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 588.629187] env[62923]: DEBUG nova.compute.manager [-] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 588.629187] env[62923]: DEBUG nova.network.neutron [-] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 588.670546] env[62923]: DEBUG nova.network.neutron [-] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 588.708019] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.979s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.708784] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.036s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.710587] env[62923]: INFO nova.compute.claims [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 588.729065] env[62923]: INFO nova.scheduler.client.report [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Deleted allocations for instance a33da17c-bbb2-4307-b4b3-56cec5cb757e [ 588.764227] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e21b4e5-a6f0-4d38-97c2-a7492dfccbbf tempest-VolumesAssistedSnapshotsTest-202943557 tempest-VolumesAssistedSnapshotsTest-202943557-project-member] Lock "e08d9f27-d2b3-4532-862f-b68e830e8d17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.031s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.054141] env[62923]: INFO nova.compute.manager [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] [instance: 33910d6d-0911-4e2a-82af-1b705cedd3fe] Took 1.04 seconds to deallocate network for instance. [ 589.125716] env[62923]: DEBUG nova.network.neutron [req-6a6fa67a-f50a-44c1-a913-392a4925d57c req-110fc9a4-7116-4fd0-aa92-35199fe0ca0c service nova] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 589.175125] env[62923]: DEBUG nova.network.neutron [-] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.239148] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b311d102-feca-4a20-935c-c5b291839fb5 tempest-ServersAdmin275Test-597350171 tempest-ServersAdmin275Test-597350171-project-member] Lock "a33da17c-bbb2-4307-b4b3-56cec5cb757e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.325s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.266666] env[62923]: DEBUG nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 589.302848] env[62923]: DEBUG nova.network.neutron [req-6a6fa67a-f50a-44c1-a913-392a4925d57c req-110fc9a4-7116-4fd0-aa92-35199fe0ca0c service nova] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.680098] env[62923]: INFO nova.compute.manager [-] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Took 1.05 seconds to deallocate network for instance. [ 589.682826] env[62923]: DEBUG nova.compute.claims [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 589.682826] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.806230] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.807829] env[62923]: DEBUG oslo_concurrency.lockutils [req-6a6fa67a-f50a-44c1-a913-392a4925d57c req-110fc9a4-7116-4fd0-aa92-35199fe0ca0c service nova] Releasing lock "refresh_cache-147165a4-9071-4516-9498-fa4c706a5e37" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.036990] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquiring lock "c11d0dcc-e5aa-4d7c-bba5-2853622dde44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.036990] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "c11d0dcc-e5aa-4d7c-bba5-2853622dde44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.100324] env[62923]: INFO nova.scheduler.client.report [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Deleted allocations for instance 33910d6d-0911-4e2a-82af-1b705cedd3fe [ 590.227977] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6082059-249f-4347-bdb6-dc7f56af0830 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.235080] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27644732-95d1-4a84-afda-e4c42717dece {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.267165] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf595b93-39fc-4882-bdcd-ec40b41d4734 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.274814] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30cc5d9-993e-4e72-bf84-2c9b3cfc5ab5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.291351] env[62923]: DEBUG nova.compute.provider_tree [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.591929] env[62923]: DEBUG nova.compute.manager [req-10c2bce6-ca4c-4453-902f-ea93775ba562 req-b85a4622-8d55-4f1e-84de-29d16023d17e service nova] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Received event network-vif-deleted-572b379b-a2f8-4652-b17d-3e90afacb781 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 590.618048] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7139035f-cef6-4200-9ed2-367d8df44db9 tempest-FloatingIPsAssociationTestJSON-1089918034 tempest-FloatingIPsAssociationTestJSON-1089918034-project-member] Lock "33910d6d-0911-4e2a-82af-1b705cedd3fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.085s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.793749] env[62923]: DEBUG nova.scheduler.client.report [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 591.121330] env[62923]: DEBUG nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 591.302386] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.592s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.302386] env[62923]: DEBUG nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 591.310431] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.080s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.650010] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.815861] env[62923]: DEBUG nova.compute.utils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 591.817377] env[62923]: DEBUG nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 591.817544] env[62923]: DEBUG nova.network.neutron [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 591.933688] env[62923]: DEBUG nova.policy [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8e632f48c0b2406dbce67e74921c9b02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '018e2d331fd5438c9caaea7d093d45cb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 592.233281] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acbfb7f-8856-4c00-a40b-b2e8be720f66 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.245243] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413852d2-a811-477a-9323-8f961aa51244 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.281857] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27750de-1e7b-432d-a9f8-77f9fd4fe781 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.289555] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e531ce38-8fc0-4685-8f01-e63ad5a5851e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.303411] env[62923]: DEBUG nova.compute.provider_tree [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.323304] env[62923]: DEBUG nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 592.806674] env[62923]: DEBUG nova.scheduler.client.report [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 593.083320] env[62923]: DEBUG nova.network.neutron [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Successfully created port: 9d162aad-0e04-43ff-85c9-d731e7490d5d {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 593.311678] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.006s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.312373] env[62923]: ERROR nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 42f42e23-879b-49d3-9d2f-4f1e206bbfae, please check neutron logs for more information. 
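The recurring "Inventory has not changed" entries in this excerpt all report the same placement inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d. Placement derives usable capacity as (total - reserved) * allocation_ratio, while max_unit caps any single allocation. A minimal sketch of that arithmetic using the figures from the log (the helper function is illustrative, not Nova or placement code):

def usable_capacity(inventory):
    # placement's documented capacity formula:
    #   capacity = (total - reserved) * allocation_ratio
    # max_unit additionally caps any single allocation; it does not
    # change the aggregate capacity computed here.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inventory.items()}

# Figures copied from the inventory entries in this log.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16,
             'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                  'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 148,
                'allocation_ratio': 1.0},
}
print(usable_capacity(inventory))
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

So this host advertises 192 schedulable vCPUs, about 196078 MB of RAM and 400 GB of disk, with any single instance limited to 16 vCPUs, 65530 MB and 148 GB.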
[ 593.312373] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Traceback (most recent call last): [ 593.312373] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 593.312373] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] self.driver.spawn(context, instance, image_meta, [ 593.312373] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 593.312373] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 593.312373] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 593.312373] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] vm_ref = self.build_virtual_machine(instance, [ 593.312373] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 593.312373] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] vif_infos = vmwarevif.get_vif_info(self._session, [ 593.312373] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 593.312871] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] for vif in network_info: [ 593.312871] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 593.312871] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] return self._sync_wrapper(fn, *args, **kwargs) [ 593.312871] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 593.312871] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] self.wait() [ 593.312871] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 593.312871] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] self[:] = self._gt.wait() [ 593.312871] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 593.312871] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] return self._exit_event.wait() [ 593.312871] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 593.312871] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] result = hub.switch() [ 593.312871] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
593.312871] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] return self.greenlet.switch() [ 593.313309] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.313309] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] result = function(*args, **kwargs) [ 593.313309] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 593.313309] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] return func(*args, **kwargs) [ 593.313309] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 593.313309] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] raise e [ 593.313309] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 593.313309] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] nwinfo = self.network_api.allocate_for_instance( [ 593.313309] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 593.313309] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] created_port_ids = self._update_ports_for_instance( [ 593.313309] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 593.313309] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] with excutils.save_and_reraise_exception(): [ 593.313309] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.313939] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] self.force_reraise() [ 593.313939] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.313939] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] raise self.value [ 593.313939] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 593.313939] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] updated_port = self._update_port( [ 593.313939] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.313939] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] _ensure_no_port_binding_failure(port) [ 593.313939] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 593.313939] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] raise exception.PortBindingFailed(port_id=port['id']) [ 593.313939] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] nova.exception.PortBindingFailed: Binding failed for port 42f42e23-879b-49d3-9d2f-4f1e206bbfae, please check neutron logs for more information. [ 593.313939] env[62923]: ERROR nova.compute.manager [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] [ 593.314551] env[62923]: DEBUG nova.compute.utils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Binding failed for port 42f42e23-879b-49d3-9d2f-4f1e206bbfae, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 593.315574] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.622s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.317058] env[62923]: INFO nova.compute.claims [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.319663] env[62923]: DEBUG nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Build of instance 25758cb8-6635-4284-bc94-a95389af3c8a was re-scheduled: Binding failed for port 42f42e23-879b-49d3-9d2f-4f1e206bbfae, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 593.322178] env[62923]: DEBUG nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 593.322178] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Acquiring lock "refresh_cache-25758cb8-6635-4284-bc94-a95389af3c8a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.322178] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Acquired lock "refresh_cache-25758cb8-6635-4284-bc94-a95389af3c8a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.322178] env[62923]: DEBUG nova.network.neutron [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 593.334030] env[62923]: DEBUG nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 593.379550] env[62923]: DEBUG nova.virt.hardware [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 593.379550] env[62923]: DEBUG nova.virt.hardware [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 593.379550] env[62923]: DEBUG nova.virt.hardware [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 593.379723] env[62923]: DEBUG nova.virt.hardware [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 593.379723] env[62923]: DEBUG nova.virt.hardware [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 593.379798] env[62923]: DEBUG nova.virt.hardware [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 593.379970] env[62923]: DEBUG nova.virt.hardware [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 593.381737] env[62923]: DEBUG nova.virt.hardware [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 593.381737] env[62923]: DEBUG nova.virt.hardware [None 
req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 593.381737] env[62923]: DEBUG nova.virt.hardware [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 593.383257] env[62923]: DEBUG nova.virt.hardware [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 593.387044] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2b120a-818c-4793-a236-305445bbd603 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.399713] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69bb4006-9477-4d7b-903a-a23fbf350f31 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.851926] env[62923]: DEBUG nova.network.neutron [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.238413] env[62923]: DEBUG nova.network.neutron [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.704777] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b1d8d6-a4e1-4ee7-9ddc-dcb5e2ef2161 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.714220] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db52fa0-b60b-44b8-8ed7-a0fc737ebe98 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.747809] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Releasing lock "refresh_cache-25758cb8-6635-4284-bc94-a95389af3c8a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.749256] env[62923]: DEBUG nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 594.749256] env[62923]: DEBUG nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 594.749256] env[62923]: DEBUG nova.network.neutron [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 594.750961] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9506fa35-9508-47af-b282-1ab96300afd4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.763156] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8578dfd-e0be-45f3-8acc-54d7b1f2d493 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.779467] env[62923]: DEBUG nova.compute.provider_tree [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.793016] env[62923]: DEBUG nova.network.neutron [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 595.282467] env[62923]: DEBUG nova.scheduler.client.report [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 595.298509] env[62923]: DEBUG nova.network.neutron [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.578343] env[62923]: DEBUG nova.compute.manager [req-5cdcd1ee-4cca-4cad-a207-65e7462048f3 req-526de230-ade1-44a1-abf5-89dd1a7c1443 service nova] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Received event network-changed-9d162aad-0e04-43ff-85c9-d731e7490d5d {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 595.578936] env[62923]: DEBUG nova.compute.manager [req-5cdcd1ee-4cca-4cad-a207-65e7462048f3 req-526de230-ade1-44a1-abf5-89dd1a7c1443 service nova] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Refreshing instance network info cache due to event network-changed-9d162aad-0e04-43ff-85c9-d731e7490d5d. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 595.579171] env[62923]: DEBUG oslo_concurrency.lockutils [req-5cdcd1ee-4cca-4cad-a207-65e7462048f3 req-526de230-ade1-44a1-abf5-89dd1a7c1443 service nova] Acquiring lock "refresh_cache-2a8c7285-35dd-4112-b84a-ea384aead074" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.579337] env[62923]: DEBUG oslo_concurrency.lockutils [req-5cdcd1ee-4cca-4cad-a207-65e7462048f3 req-526de230-ade1-44a1-abf5-89dd1a7c1443 service nova] Acquired lock "refresh_cache-2a8c7285-35dd-4112-b84a-ea384aead074" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.579546] env[62923]: DEBUG nova.network.neutron [req-5cdcd1ee-4cca-4cad-a207-65e7462048f3 req-526de230-ade1-44a1-abf5-89dd1a7c1443 service nova] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Refreshing network info cache for port 9d162aad-0e04-43ff-85c9-d731e7490d5d {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 595.788591] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.789059] env[62923]: DEBUG nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 595.793963] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.239s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.796716] env[62923]: INFO nova.compute.claims [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.804831] env[62923]: INFO nova.compute.manager [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] [instance: 25758cb8-6635-4284-bc94-a95389af3c8a] Took 1.05 seconds to deallocate network for instance. [ 595.913356] env[62923]: ERROR nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9d162aad-0e04-43ff-85c9-d731e7490d5d, please check neutron logs for more information. 
[ 595.913356] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 595.913356] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 595.913356] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 595.913356] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 595.913356] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 595.913356] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 595.913356] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 595.913356] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.913356] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 595.913356] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.913356] env[62923]: ERROR nova.compute.manager raise self.value [ 595.913356] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 595.913356] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 595.913356] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 595.913356] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 595.914563] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 595.914563] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 595.914563] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9d162aad-0e04-43ff-85c9-d731e7490d5d, please check neutron logs for more information. 
[ 595.914563] env[62923]: ERROR nova.compute.manager [ 595.914563] env[62923]: Traceback (most recent call last): [ 595.914563] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 595.914563] env[62923]: listener.cb(fileno) [ 595.914563] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 595.914563] env[62923]: result = function(*args, **kwargs) [ 595.914563] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 595.914563] env[62923]: return func(*args, **kwargs) [ 595.914563] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 595.914563] env[62923]: raise e [ 595.914563] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 595.914563] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 595.914563] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 595.914563] env[62923]: created_port_ids = self._update_ports_for_instance( [ 595.914563] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 595.914563] env[62923]: with excutils.save_and_reraise_exception(): [ 595.914563] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.914563] env[62923]: self.force_reraise() [ 595.914563] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.914563] env[62923]: raise self.value [ 595.914563] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 595.914563] env[62923]: updated_port = self._update_port( [ 595.914563] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 595.914563] env[62923]: _ensure_no_port_binding_failure(port) [ 595.914563] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 595.914563] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 595.915542] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 9d162aad-0e04-43ff-85c9-d731e7490d5d, please check neutron logs for more information. [ 595.915542] env[62923]: Removing descriptor: 17 [ 595.915542] env[62923]: ERROR nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9d162aad-0e04-43ff-85c9-d731e7490d5d, please check neutron logs for more information. 
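Both tracebacks above bottom out in nova/network/neutron.py:294, `_ensure_no_port_binding_failure`. The idea behind that guard: when a Neutron ML2 mechanism driver cannot bind a port, Neutron leaves the port's `binding:vif_type` set to `binding_failed`, and Nova converts that marker into `PortBindingFailed` instead of spawning a guest with a dead VIF. A minimal standalone sketch of the pattern, assuming that marker value; the exception class here is a stand-in, not the Nova source:

```python
VIF_TYPE_BINDING_FAILED = 'binding_failed'  # marker Neutron leaves on a failed binding

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron "
            "logs for more information.")

def ensure_no_port_binding_failure(port):
    # Mirrors the guard the traceback points at: reject any port whose
    # binding the Neutron backend could not complete.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# Reproduces the failure mode logged above for port 9d162aad-...:
ensure_no_port_binding_failure(
    {'id': '9d162aad-0e04-43ff-85c9-d731e7490d5d',
     'binding:vif_type': VIF_TYPE_BINDING_FAILED})  # raises PortBindingFailed
```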
[ 595.915542] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Traceback (most recent call last): [ 595.915542] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 595.915542] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] yield resources [ 595.915542] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 595.915542] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] self.driver.spawn(context, instance, image_meta, [ 595.915542] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 595.915542] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] self._vmops.spawn(context, instance, image_meta, injected_files, [ 595.915542] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 595.915542] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] vm_ref = self.build_virtual_machine(instance, [ 595.915929] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 595.915929] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] vif_infos = vmwarevif.get_vif_info(self._session, [ 595.915929] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 595.915929] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] for vif in network_info: [ 595.915929] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 595.915929] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] return self._sync_wrapper(fn, *args, **kwargs) [ 595.915929] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 595.915929] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] self.wait() [ 595.915929] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 595.915929] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] self[:] = self._gt.wait() [ 595.915929] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 595.915929] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] return self._exit_event.wait() [ 595.915929] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 595.916351] env[62923]: ERROR 
nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] result = hub.switch() [ 595.916351] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 595.916351] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] return self.greenlet.switch() [ 595.916351] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 595.916351] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] result = function(*args, **kwargs) [ 595.916351] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 595.916351] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] return func(*args, **kwargs) [ 595.916351] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 595.916351] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] raise e [ 595.916351] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 595.916351] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] nwinfo = self.network_api.allocate_for_instance( [ 595.916351] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 595.916351] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] created_port_ids = self._update_ports_for_instance( [ 595.916809] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 595.916809] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] with excutils.save_and_reraise_exception(): [ 595.916809] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.916809] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] self.force_reraise() [ 595.916809] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.916809] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] raise self.value [ 595.916809] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 595.916809] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] updated_port = self._update_port( [ 595.916809] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 595.916809] 
env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] _ensure_no_port_binding_failure(port) [ 595.916809] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 595.916809] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] raise exception.PortBindingFailed(port_id=port['id']) [ 595.917484] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] nova.exception.PortBindingFailed: Binding failed for port 9d162aad-0e04-43ff-85c9-d731e7490d5d, please check neutron logs for more information. [ 595.917484] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] [ 595.917484] env[62923]: INFO nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Terminating instance [ 595.922557] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Acquiring lock "refresh_cache-2a8c7285-35dd-4112-b84a-ea384aead074" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.121126] env[62923]: DEBUG nova.network.neutron [req-5cdcd1ee-4cca-4cad-a207-65e7462048f3 req-526de230-ade1-44a1-abf5-89dd1a7c1443 service nova] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.244769] env[62923]: DEBUG nova.network.neutron [req-5cdcd1ee-4cca-4cad-a207-65e7462048f3 req-526de230-ade1-44a1-abf5-89dd1a7c1443 service nova] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.308658] env[62923]: DEBUG nova.compute.utils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 596.314591] env[62923]: DEBUG nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 596.314591] env[62923]: DEBUG nova.network.neutron [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 596.393165] env[62923]: DEBUG nova.policy [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1794e9391e5e4d18b90e522bd2acc8ef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '276d6c9494694392ac867f5f3976b79e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 596.748736] env[62923]: DEBUG oslo_concurrency.lockutils [req-5cdcd1ee-4cca-4cad-a207-65e7462048f3 req-526de230-ade1-44a1-abf5-89dd1a7c1443 service nova] Releasing lock "refresh_cache-2a8c7285-35dd-4112-b84a-ea384aead074" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.749319] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Acquired lock "refresh_cache-2a8c7285-35dd-4112-b84a-ea384aead074" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.749615] env[62923]: DEBUG nova.network.neutron [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 596.812867] env[62923]: DEBUG nova.network.neutron [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Successfully created port: 4b93a39a-68b5-4df8-836c-65d9d217dd6b {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 596.823156] env[62923]: DEBUG nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 596.868291] env[62923]: INFO nova.scheduler.client.report [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Deleted allocations for instance 25758cb8-6635-4284-bc94-a95389af3c8a [ 597.243615] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ecba615-6e03-45e0-ae55-8920eaf23642 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.256655] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c344a3f-98d8-4b6b-b0d7-1425bec9ae23 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.296497] env[62923]: DEBUG nova.network.neutron [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.299676] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c2e8a4-8925-407c-967f-1b786ddaa2bd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.308150] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab6c59a-d6e1-4e41-ada6-bffa85092152 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.325177] env[62923]: DEBUG nova.compute.provider_tree [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.385076] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f25d0e30-8ad9-41f6-8a30-8fc95085a42e tempest-ServersWithSpecificFlavorTestJSON-1230324252 tempest-ServersWithSpecificFlavorTestJSON-1230324252-project-member] Lock "25758cb8-6635-4284-bc94-a95389af3c8a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.167s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.455695] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Acquiring lock "81c87881-bf63-4622-a0cb-6e38680a8f14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.455953] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Lock "81c87881-bf63-4622-a0cb-6e38680a8f14" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.549605] env[62923]: DEBUG nova.network.neutron [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.760034] env[62923]: DEBUG nova.compute.manager [req-3b86a2f6-5010-4f3d-b8e9-ef0d186e2f75 req-e937658a-6131-42a3-847a-bb5ce6bc3b1a service nova] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Received event network-vif-deleted-9d162aad-0e04-43ff-85c9-d731e7490d5d {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 597.830033] env[62923]: DEBUG nova.scheduler.client.report [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 597.837079] env[62923]: DEBUG nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 597.883185] env[62923]: DEBUG nova.virt.hardware [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 597.883185] env[62923]: DEBUG nova.virt.hardware [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 597.883185] env[62923]: DEBUG nova.virt.hardware [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 597.883422] env[62923]: DEBUG nova.virt.hardware [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 597.883422] env[62923]: DEBUG nova.virt.hardware [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 597.884357] env[62923]: DEBUG nova.virt.hardware [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 597.884357] env[62923]: DEBUG nova.virt.hardware [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 597.884357] env[62923]: DEBUG nova.virt.hardware [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 597.884357] env[62923]: DEBUG nova.virt.hardware [None 
req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 597.884822] env[62923]: DEBUG nova.virt.hardware [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 597.885143] env[62923]: DEBUG nova.virt.hardware [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 597.889135] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c578c7-9a46-4d92-8791-7c1b0be8c761 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.891943] env[62923]: DEBUG nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 597.906888] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde33bd3-689b-4226-a420-686b04df89c2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.053473] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Releasing lock "refresh_cache-2a8c7285-35dd-4112-b84a-ea384aead074" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.053843] env[62923]: DEBUG nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 598.054597] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 598.054597] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2fdc0f69-0d81-447f-b7ce-8dec07f85b8c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.067961] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a677f97-b929-41e9-9a51-6f3956c94564 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.095502] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2a8c7285-35dd-4112-b84a-ea384aead074 could not be found. [ 598.095502] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 598.095502] env[62923]: INFO nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Took 0.04 seconds to destroy the instance on the hypervisor. [ 598.095502] env[62923]: DEBUG oslo.service.loopingcall [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 598.095740] env[62923]: DEBUG nova.compute.manager [-] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 598.095785] env[62923]: DEBUG nova.network.neutron [-] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 598.142913] env[62923]: DEBUG nova.network.neutron [-] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.340915] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.547s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.341450] env[62923]: DEBUG nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 598.346158] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.906s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.346158] env[62923]: INFO nova.compute.claims [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.434429] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.646532] env[62923]: DEBUG nova.network.neutron [-] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.853649] env[62923]: DEBUG nova.compute.utils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 598.855086] env[62923]: DEBUG nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 598.859020] env[62923]: DEBUG nova.network.neutron [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 599.150180] env[62923]: INFO nova.compute.manager [-] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Took 1.05 seconds to deallocate network for instance. [ 599.153281] env[62923]: DEBUG nova.compute.claims [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 599.156589] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.172217] env[62923]: DEBUG nova.policy [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4aa4273c50294c8da92344a6ed3e4ae2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6d54eb98d27441bb7b64ac757d91de3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 599.358691] env[62923]: DEBUG nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 599.745092] env[62923]: ERROR nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4b93a39a-68b5-4df8-836c-65d9d217dd6b, please check neutron logs for more information. 
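A few records up, nova.policy (policy.py:201) logs a failed check for `network:attach_external_network` against a token carrying only the `member` and `reader` roles. That check is an oslo.policy enforcement; a small standalone sketch of how such a check evaluates, with the `role:admin` rule string assumed here purely for illustration (Nova/Neutron's real defaults live in their policy definitions, not in this snippet):

```python
from oslo_config import cfg
from oslo_policy import policy

CONF = cfg.CONF
CONF([], project='policy-demo')  # parse empty args so option defaults are readable

enforcer = policy.Enforcer(CONF)
# Assumed rule string for this sketch only.
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],
         'project_id': '276d6c9494694392ac867f5f3976b79e'}
# member/reader does not satisfy role:admin, so this returns False --
# the same outcome the DEBUG nova.policy record above reports.
print(enforcer.enforce('network:attach_external_network', target={}, creds=creds))
```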
[ 599.745092] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 599.745092] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 599.745092] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 599.745092] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 599.745092] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 599.745092] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 599.745092] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 599.745092] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 599.745092] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 599.745092] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 599.745092] env[62923]: ERROR nova.compute.manager raise self.value [ 599.745092] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 599.745092] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 599.745092] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 599.745092] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 599.745687] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 599.745687] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 599.745687] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4b93a39a-68b5-4df8-836c-65d9d217dd6b, please check neutron logs for more information. 
[ 599.745687] env[62923]: ERROR nova.compute.manager [ 599.745687] env[62923]: Traceback (most recent call last): [ 599.745687] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 599.745687] env[62923]: listener.cb(fileno) [ 599.745687] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 599.745687] env[62923]: result = function(*args, **kwargs) [ 599.745687] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 599.745687] env[62923]: return func(*args, **kwargs) [ 599.745687] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 599.745687] env[62923]: raise e [ 599.745687] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 599.745687] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 599.745687] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 599.745687] env[62923]: created_port_ids = self._update_ports_for_instance( [ 599.745687] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 599.745687] env[62923]: with excutils.save_and_reraise_exception(): [ 599.745687] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 599.745687] env[62923]: self.force_reraise() [ 599.745687] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 599.745687] env[62923]: raise self.value [ 599.745687] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 599.745687] env[62923]: updated_port = self._update_port( [ 599.745687] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 599.745687] env[62923]: _ensure_no_port_binding_failure(port) [ 599.745687] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 599.745687] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 599.746654] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 4b93a39a-68b5-4df8-836c-65d9d217dd6b, please check neutron logs for more information. [ 599.746654] env[62923]: Removing descriptor: 17 [ 599.746654] env[62923]: ERROR nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4b93a39a-68b5-4df8-836c-65d9d217dd6b, please check neutron logs for more information. 
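The frame at nova/network/neutron.py:1414 that appears in every traceback in this section is oslo.utils' `excutils.save_and_reraise_exception()`: a context manager used inside an `except` block so cleanup can run while the original exception is preserved and then re-raised when the block exits. A minimal usage sketch of that pattern; `client`, `create_port` and `delete_port` are hypothetical stand-ins for the real Neutron calls:

```python
from oslo_utils import excutils

def allocate_ports(client, requests):
    """Create ports one by one; on any failure, roll back and re-raise."""
    created = []
    try:
        for req in requests:
            created.append(client.create_port(req))  # hypothetical client API
    except Exception:
        with excutils.save_and_reraise_exception():
            # Runs with the original exception saved; it is re-raised
            # automatically when this with-block exits, so the caller
            # still sees the first error (e.g. PortBindingFailed).
            for port in created:
                client.delete_port(port)
    return created
```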
[ 599.746654] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Traceback (most recent call last): [ 599.746654] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 599.746654] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] yield resources [ 599.746654] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 599.746654] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] self.driver.spawn(context, instance, image_meta, [ 599.746654] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 599.746654] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] self._vmops.spawn(context, instance, image_meta, injected_files, [ 599.746654] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 599.746654] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] vm_ref = self.build_virtual_machine(instance, [ 599.747061] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 599.747061] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] vif_infos = vmwarevif.get_vif_info(self._session, [ 599.747061] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 599.747061] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] for vif in network_info: [ 599.747061] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 599.747061] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] return self._sync_wrapper(fn, *args, **kwargs) [ 599.747061] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 599.747061] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] self.wait() [ 599.747061] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 599.747061] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] self[:] = self._gt.wait() [ 599.747061] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 599.747061] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] return self._exit_event.wait() [ 599.747061] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 599.747491] env[62923]: ERROR 
nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] result = hub.switch() [ 599.747491] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 599.747491] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] return self.greenlet.switch() [ 599.747491] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 599.747491] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] result = function(*args, **kwargs) [ 599.747491] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 599.747491] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] return func(*args, **kwargs) [ 599.747491] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 599.747491] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] raise e [ 599.747491] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 599.747491] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] nwinfo = self.network_api.allocate_for_instance( [ 599.747491] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 599.747491] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] created_port_ids = self._update_ports_for_instance( [ 599.747923] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 599.747923] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] with excutils.save_and_reraise_exception(): [ 599.747923] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 599.747923] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] self.force_reraise() [ 599.747923] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 599.747923] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] raise self.value [ 599.747923] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 599.747923] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] updated_port = self._update_port( [ 599.747923] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 599.747923] 
env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] _ensure_no_port_binding_failure(port) [ 599.747923] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 599.747923] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] raise exception.PortBindingFailed(port_id=port['id']) [ 599.748324] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] nova.exception.PortBindingFailed: Binding failed for port 4b93a39a-68b5-4df8-836c-65d9d217dd6b, please check neutron logs for more information. [ 599.748324] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] [ 599.748324] env[62923]: INFO nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Terminating instance [ 599.749097] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Acquiring lock "refresh_cache-ab0f02a1-f883-4ad6-8f8c-5c300fff0f70" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.749097] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Acquired lock "refresh_cache-ab0f02a1-f883-4ad6-8f8c-5c300fff0f70" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.749097] env[62923]: DEBUG nova.network.neutron [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 599.758632] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5ca005-9cf2-4b04-811c-bb366cf51a83 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.768675] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80565417-7b54-45b0-b011-d2fc32ace5a6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.803778] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43715af-195e-44f1-9c4e-338780424cf8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.811226] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df94f52d-1872-442f-a6c2-ffc9eb5bcedb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.825149] env[62923]: DEBUG nova.compute.provider_tree [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] 
Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.163590] env[62923]: DEBUG nova.network.neutron [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Successfully created port: 1221c65a-3610-477c-97f6-0202d2be27cb {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 600.303081] env[62923]: DEBUG nova.network.neutron [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.329503] env[62923]: DEBUG nova.scheduler.client.report [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.378832] env[62923]: DEBUG nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 600.418767] env[62923]: DEBUG nova.virt.hardware [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 600.418913] env[62923]: DEBUG nova.virt.hardware [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 600.418994] env[62923]: DEBUG nova.virt.hardware [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.419187] env[62923]: DEBUG nova.virt.hardware [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 600.419326] env[62923]: DEBUG nova.virt.hardware [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.419670] env[62923]: DEBUG nova.virt.hardware [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 600.419670] env[62923]: DEBUG nova.virt.hardware [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 600.419815] env[62923]: DEBUG nova.virt.hardware [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 600.419970] env[62923]: DEBUG nova.virt.hardware [None 
req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 600.420658] env[62923]: DEBUG nova.virt.hardware [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 600.420658] env[62923]: DEBUG nova.virt.hardware [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 600.421689] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37cceb5-3206-4ba8-9bf5-0423d0807e0e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.436354] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db929b5-8f6d-4b82-b4e3-c1cbddca409d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.478131] env[62923]: DEBUG nova.network.neutron [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.527510] env[62923]: DEBUG nova.compute.manager [req-1af2e545-5db1-4bf2-8f33-9a5de3b0d7bd req-291c5793-3e52-485e-b299-67062ffd3774 service nova] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Received event network-changed-4b93a39a-68b5-4df8-836c-65d9d217dd6b {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 600.527693] env[62923]: DEBUG nova.compute.manager [req-1af2e545-5db1-4bf2-8f33-9a5de3b0d7bd req-291c5793-3e52-485e-b299-67062ffd3774 service nova] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Refreshing instance network info cache due to event network-changed-4b93a39a-68b5-4df8-836c-65d9d217dd6b. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 600.528256] env[62923]: DEBUG oslo_concurrency.lockutils [req-1af2e545-5db1-4bf2-8f33-9a5de3b0d7bd req-291c5793-3e52-485e-b299-67062ffd3774 service nova] Acquiring lock "refresh_cache-ab0f02a1-f883-4ad6-8f8c-5c300fff0f70" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.834973] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.491s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.835372] env[62923]: DEBUG nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 600.838474] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.809s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.842276] env[62923]: INFO nova.compute.claims [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.980686] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Releasing lock "refresh_cache-ab0f02a1-f883-4ad6-8f8c-5c300fff0f70" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.981407] env[62923]: DEBUG nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 600.981678] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 600.982516] env[62923]: DEBUG oslo_concurrency.lockutils [req-1af2e545-5db1-4bf2-8f33-9a5de3b0d7bd req-291c5793-3e52-485e-b299-67062ffd3774 service nova] Acquired lock "refresh_cache-ab0f02a1-f883-4ad6-8f8c-5c300fff0f70" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.983235] env[62923]: DEBUG nova.network.neutron [req-1af2e545-5db1-4bf2-8f33-9a5de3b0d7bd req-291c5793-3e52-485e-b299-67062ffd3774 service nova] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Refreshing network info cache for port 4b93a39a-68b5-4df8-836c-65d9d217dd6b {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 600.984494] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-43a7179a-00f6-443c-831e-471f720faedf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.997559] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b938abb-69be-4f05-9e36-bcf012e8a180 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.027023] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ab0f02a1-f883-4ad6-8f8c-5c300fff0f70 could not be found. [ 601.027023] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 601.027023] env[62923]: INFO nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Took 0.04 seconds to destroy the instance on the hypervisor. [ 601.027023] env[62923]: DEBUG oslo.service.loopingcall [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 601.027023] env[62923]: DEBUG nova.compute.manager [-] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 601.027023] env[62923]: DEBUG nova.network.neutron [-] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 601.058912] env[62923]: DEBUG nova.network.neutron [-] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.156861] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "d45fe9ea-8538-47da-b8dd-c67f8863a812" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.157287] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "d45fe9ea-8538-47da-b8dd-c67f8863a812" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.343993] env[62923]: DEBUG nova.compute.utils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.349893] env[62923]: DEBUG nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Allocating IP information in the background.
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 601.351199] env[62923]: DEBUG nova.network.neutron [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 601.475420] env[62923]: DEBUG nova.policy [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4d4a83e60df4c1fb7812183126b370e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9f69e1ca17f749ebb1fb432cf3d68310', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 601.532961] env[62923]: DEBUG nova.network.neutron [req-1af2e545-5db1-4bf2-8f33-9a5de3b0d7bd req-291c5793-3e52-485e-b299-67062ffd3774 service nova] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.560538] env[62923]: DEBUG nova.network.neutron [-] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.753409] env[62923]: DEBUG nova.network.neutron [req-1af2e545-5db1-4bf2-8f33-9a5de3b0d7bd req-291c5793-3e52-485e-b299-67062ffd3774 service nova] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.860714] env[62923]: DEBUG nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 601.974426] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "325e8102-c129-40f4-b61d-1976d2a1fe42" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.974426] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "325e8102-c129-40f4-b61d-1976d2a1fe42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.010032] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "fa7295fe-b893-455b-9d4b-4013c187c288" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.010294] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "fa7295fe-b893-455b-9d4b-4013c187c288" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.046785] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "627ebcab-90f9-4ebe-baf9-52fe808ec8c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.049916] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "627ebcab-90f9-4ebe-baf9-52fe808ec8c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.067488] env[62923]: INFO nova.compute.manager [-] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Took 1.04 seconds to deallocate network for instance.
[ 602.071831] env[62923]: DEBUG nova.compute.claims [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 602.071831] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.255827] env[62923]: DEBUG oslo_concurrency.lockutils [req-1af2e545-5db1-4bf2-8f33-9a5de3b0d7bd req-291c5793-3e52-485e-b299-67062ffd3774 service nova] Releasing lock "refresh_cache-ab0f02a1-f883-4ad6-8f8c-5c300fff0f70" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.256141] env[62923]: DEBUG nova.compute.manager [req-1af2e545-5db1-4bf2-8f33-9a5de3b0d7bd req-291c5793-3e52-485e-b299-67062ffd3774 service nova] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Received event network-vif-deleted-4b93a39a-68b5-4df8-836c-65d9d217dd6b {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 602.257327] env[62923]: DEBUG nova.network.neutron [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Successfully created port: f53dec6a-8c2a-461a-9555-1212dc01ba74 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.314939] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb986e77-a2c9-45f5-89b5-fc01fe9e88cc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.327168] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e640912-090d-47d7-b24d-720cef00360b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.365335] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798a3205-ff22-4ca8-94bf-30f2841ce45b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.379414] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceebee5a-0c4c-424c-8cb1-ef9d32c61a60 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.397088] env[62923]: DEBUG nova.compute.provider_tree [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.402517] env[62923]: ERROR nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Instance failed network 
setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1221c65a-3610-477c-97f6-0202d2be27cb, please check neutron logs for more information. [ 602.402517] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 602.402517] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.402517] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 602.402517] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 602.402517] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 602.402517] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 602.402517] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 602.402517] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.402517] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 602.402517] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.402517] env[62923]: ERROR nova.compute.manager raise self.value [ 602.402517] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 602.402517] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 602.402517] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.402517] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 602.402903] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.402903] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 602.402903] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1221c65a-3610-477c-97f6-0202d2be27cb, please check neutron logs for more information. 
[ 602.402903] env[62923]: ERROR nova.compute.manager [ 602.402903] env[62923]: Traceback (most recent call last): [ 602.402903] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 602.402903] env[62923]: listener.cb(fileno) [ 602.402903] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.402903] env[62923]: result = function(*args, **kwargs) [ 602.402903] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 602.402903] env[62923]: return func(*args, **kwargs) [ 602.402903] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 602.402903] env[62923]: raise e [ 602.402903] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.402903] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 602.402903] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 602.402903] env[62923]: created_port_ids = self._update_ports_for_instance( [ 602.402903] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 602.402903] env[62923]: with excutils.save_and_reraise_exception(): [ 602.402903] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.402903] env[62923]: self.force_reraise() [ 602.402903] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.402903] env[62923]: raise self.value [ 602.402903] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 602.402903] env[62923]: updated_port = self._update_port( [ 602.402903] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.402903] env[62923]: _ensure_no_port_binding_failure(port) [ 602.402903] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.402903] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 602.404085] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 1221c65a-3610-477c-97f6-0202d2be27cb, please check neutron logs for more information. [ 602.404085] env[62923]: Removing descriptor: 21 [ 602.404085] env[62923]: ERROR nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1221c65a-3610-477c-97f6-0202d2be27cb, please check neutron logs for more information. 
[ 602.404085] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Traceback (most recent call last): [ 602.404085] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 602.404085] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] yield resources [ 602.404085] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 602.404085] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] self.driver.spawn(context, instance, image_meta, [ 602.404085] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 602.404085] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] self._vmops.spawn(context, instance, image_meta, injected_files, [ 602.404085] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 602.404085] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] vm_ref = self.build_virtual_machine(instance, [ 602.404445] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 602.404445] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] vif_infos = vmwarevif.get_vif_info(self._session, [ 602.404445] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 602.404445] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] for vif in network_info: [ 602.404445] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 602.404445] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] return self._sync_wrapper(fn, *args, **kwargs) [ 602.404445] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 602.404445] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] self.wait() [ 602.404445] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 602.404445] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] self[:] = self._gt.wait() [ 602.404445] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 602.404445] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] return self._exit_event.wait() [ 602.404445] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 602.404759] env[62923]: ERROR 
nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] result = hub.switch() [ 602.404759] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 602.404759] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] return self.greenlet.switch() [ 602.404759] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.404759] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] result = function(*args, **kwargs) [ 602.404759] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 602.404759] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] return func(*args, **kwargs) [ 602.404759] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 602.404759] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] raise e [ 602.404759] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.404759] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] nwinfo = self.network_api.allocate_for_instance( [ 602.404759] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 602.404759] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] created_port_ids = self._update_ports_for_instance( [ 602.405086] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 602.405086] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] with excutils.save_and_reraise_exception(): [ 602.405086] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.405086] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] self.force_reraise() [ 602.405086] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.405086] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] raise self.value [ 602.405086] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 602.405086] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] updated_port = self._update_port( [ 602.405086] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.405086] 
env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] _ensure_no_port_binding_failure(port) [ 602.405086] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.405086] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] raise exception.PortBindingFailed(port_id=port['id']) [ 602.405362] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] nova.exception.PortBindingFailed: Binding failed for port 1221c65a-3610-477c-97f6-0202d2be27cb, please check neutron logs for more information. [ 602.405362] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] [ 602.405362] env[62923]: INFO nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Terminating instance [ 602.406437] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Acquiring lock "refresh_cache-353b72b2-cd56-442f-9010-c75baf8f5a48" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.406437] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Acquired lock "refresh_cache-353b72b2-cd56-442f-9010-c75baf8f5a48" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.406437] env[62923]: DEBUG nova.network.neutron [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 602.567093] env[62923]: DEBUG nova.compute.manager [req-4cc83444-8f8a-4048-957c-39efee1d492b req-7b5e11a3-00c4-4e4f-99c1-a48324c7c1dd service nova] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Received event network-changed-1221c65a-3610-477c-97f6-0202d2be27cb {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 602.567190] env[62923]: DEBUG nova.compute.manager [req-4cc83444-8f8a-4048-957c-39efee1d492b req-7b5e11a3-00c4-4e4f-99c1-a48324c7c1dd service nova] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Refreshing instance network info cache due to event network-changed-1221c65a-3610-477c-97f6-0202d2be27cb. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 602.567816] env[62923]: DEBUG oslo_concurrency.lockutils [req-4cc83444-8f8a-4048-957c-39efee1d492b req-7b5e11a3-00c4-4e4f-99c1-a48324c7c1dd service nova] Acquiring lock "refresh_cache-353b72b2-cd56-442f-9010-c75baf8f5a48" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.878784] env[62923]: DEBUG nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 602.900297] env[62923]: DEBUG nova.scheduler.client.report [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 602.919439] env[62923]: DEBUG nova.virt.hardware [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 602.919439] env[62923]: DEBUG nova.virt.hardware [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 602.919439] env[62923]: DEBUG nova.virt.hardware [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.919568] env[62923]: DEBUG nova.virt.hardware [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 
tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 602.919568] env[62923]: DEBUG nova.virt.hardware [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.919664] env[62923]: DEBUG nova.virt.hardware [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 602.919848] env[62923]: DEBUG nova.virt.hardware [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 602.920028] env[62923]: DEBUG nova.virt.hardware [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 602.920693] env[62923]: DEBUG nova.virt.hardware [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 602.920693] env[62923]: DEBUG nova.virt.hardware [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 602.920693] env[62923]: DEBUG nova.virt.hardware [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 602.921445] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a0129f-6ce9-46f4-9637-0ec8cd4a6708 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.932213] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7f7656-887b-44b0-959a-c12abefd2f31 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.956050] env[62923]: DEBUG nova.network.neutron [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 
tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.160843] env[62923]: DEBUG nova.network.neutron [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.415947] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.577s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.416480] env[62923]: DEBUG nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 603.419137] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.615s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.666952] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Releasing lock "refresh_cache-353b72b2-cd56-442f-9010-c75baf8f5a48" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.668030] env[62923]: DEBUG nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 603.668279] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 603.668670] env[62923]: DEBUG oslo_concurrency.lockutils [req-4cc83444-8f8a-4048-957c-39efee1d492b req-7b5e11a3-00c4-4e4f-99c1-a48324c7c1dd service nova] Acquired lock "refresh_cache-353b72b2-cd56-442f-9010-c75baf8f5a48" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.669423] env[62923]: DEBUG nova.network.neutron [req-4cc83444-8f8a-4048-957c-39efee1d492b req-7b5e11a3-00c4-4e4f-99c1-a48324c7c1dd service nova] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Refreshing network info cache for port 1221c65a-3610-477c-97f6-0202d2be27cb {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 603.671790] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d42e9d7-1388-45fc-b613-8aa52cf309be {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.683684] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f99ed8-6f95-4a30-9b6f-fce3bfea0014 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.710287] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 353b72b2-cd56-442f-9010-c75baf8f5a48 could not be found. [ 603.710287] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 603.710287] env[62923]: INFO nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Took 0.04 seconds to destroy the instance on the hypervisor. [ 603.710287] env[62923]: DEBUG oslo.service.loopingcall [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 603.710287] env[62923]: DEBUG nova.compute.manager [-] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 603.710287] env[62923]: DEBUG nova.network.neutron [-] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 603.735863] env[62923]: DEBUG nova.network.neutron [-] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.928942] env[62923]: DEBUG nova.compute.utils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.930114] env[62923]: ERROR nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f53dec6a-8c2a-461a-9555-1212dc01ba74, please check neutron logs for more information. [ 603.930114] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 603.930114] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 603.930114] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 603.930114] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 603.930114] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 603.930114] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 603.930114] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 603.930114] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.930114] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 603.930114] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.930114] env[62923]: ERROR nova.compute.manager raise self.value [ 603.930114] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 603.930114] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 603.930114] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.930114] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 603.930635] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.930635] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 603.930635] env[62923]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port f53dec6a-8c2a-461a-9555-1212dc01ba74, please check neutron logs for more information. [ 603.930635] env[62923]: ERROR nova.compute.manager [ 603.930635] env[62923]: Traceback (most recent call last): [ 603.930635] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 603.930635] env[62923]: listener.cb(fileno) [ 603.930635] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 603.930635] env[62923]: result = function(*args, **kwargs) [ 603.930635] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 603.930635] env[62923]: return func(*args, **kwargs) [ 603.930635] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 603.930635] env[62923]: raise e [ 603.930635] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 603.930635] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 603.930635] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 603.930635] env[62923]: created_port_ids = self._update_ports_for_instance( [ 603.930635] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 603.930635] env[62923]: with excutils.save_and_reraise_exception(): [ 603.930635] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.930635] env[62923]: self.force_reraise() [ 603.930635] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.930635] env[62923]: raise self.value [ 603.930635] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 603.930635] env[62923]: updated_port = self._update_port( [ 603.930635] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.930635] env[62923]: _ensure_no_port_binding_failure(port) [ 603.930635] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.930635] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 603.932965] env[62923]: nova.exception.PortBindingFailed: Binding failed for port f53dec6a-8c2a-461a-9555-1212dc01ba74, please check neutron logs for more information. [ 603.932965] env[62923]: Removing descriptor: 17 [ 603.932965] env[62923]: DEBUG nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 603.932965] env[62923]: DEBUG nova.network.neutron [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 603.933219] env[62923]: ERROR nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f53dec6a-8c2a-461a-9555-1212dc01ba74, please check neutron logs for more information. [ 603.933219] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Traceback (most recent call last): [ 603.933219] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 603.933219] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] yield resources [ 603.933219] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 603.933219] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] self.driver.spawn(context, instance, image_meta, [ 603.933219] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 603.933219] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] self._vmops.spawn(context, instance, image_meta, injected_files, [ 603.933219] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 603.933219] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] vm_ref = self.build_virtual_machine(instance, [ 603.933219] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 603.933502] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] vif_infos = vmwarevif.get_vif_info(self._session, [ 603.933502] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 603.933502] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] for vif in network_info: [ 603.933502] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 603.933502] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] return self._sync_wrapper(fn, *args, **kwargs) [ 603.933502] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 603.933502] env[62923]: ERROR nova.compute.manager [instance: 
03ee0097-1200-43ce-9baa-e9da80105516] self.wait() [ 603.933502] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 603.933502] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] self[:] = self._gt.wait() [ 603.933502] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 603.933502] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] return self._exit_event.wait() [ 603.933502] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 603.933502] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] result = hub.switch() [ 603.933862] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 603.933862] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] return self.greenlet.switch() [ 603.933862] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 603.933862] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] result = function(*args, **kwargs) [ 603.933862] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 603.933862] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] return func(*args, **kwargs) [ 603.933862] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 603.933862] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] raise e [ 603.933862] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 603.933862] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] nwinfo = self.network_api.allocate_for_instance( [ 603.933862] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 603.933862] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] created_port_ids = self._update_ports_for_instance( [ 603.933862] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 603.934230] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] with excutils.save_and_reraise_exception(): [ 603.934230] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.934230] env[62923]: ERROR nova.compute.manager [instance: 
03ee0097-1200-43ce-9baa-e9da80105516] self.force_reraise() [ 603.934230] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.934230] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] raise self.value [ 603.934230] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 603.934230] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] updated_port = self._update_port( [ 603.934230] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.934230] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] _ensure_no_port_binding_failure(port) [ 603.934230] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 603.934230] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] raise exception.PortBindingFailed(port_id=port['id']) [ 603.934230] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] nova.exception.PortBindingFailed: Binding failed for port f53dec6a-8c2a-461a-9555-1212dc01ba74, please check neutron logs for more information. [ 603.934230] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] [ 603.934554] env[62923]: INFO nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Terminating instance [ 603.935572] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Acquiring lock "refresh_cache-03ee0097-1200-43ce-9baa-e9da80105516" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.935813] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Acquired lock "refresh_cache-03ee0097-1200-43ce-9baa-e9da80105516" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.935879] env[62923]: DEBUG nova.network.neutron [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 603.992052] env[62923]: DEBUG nova.policy [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 
'c654b8365f5543f3bf713f3f5aa00654', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a654d46357ed49cd95460a56926f102a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 604.193320] env[62923]: DEBUG nova.network.neutron [req-4cc83444-8f8a-4048-957c-39efee1d492b req-7b5e11a3-00c4-4e4f-99c1-a48324c7c1dd service nova] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.239250] env[62923]: DEBUG nova.network.neutron [-] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.382924] env[62923]: DEBUG nova.network.neutron [req-4cc83444-8f8a-4048-957c-39efee1d492b req-7b5e11a3-00c4-4e4f-99c1-a48324c7c1dd service nova] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.402711] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27537353-8b29-4b86-9fdc-955f70827d80 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.411476] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e79432e-7029-4b2b-9428-fcb566c88a86 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.447993] env[62923]: DEBUG nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 604.453923] env[62923]: DEBUG nova.network.neutron [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Successfully created port: 648493d0-da21-4752-941b-c298fb95d7e4 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.458121] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fc5e08-05b7-415d-8c5a-c3464939c0ab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.467585] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0621785-73cd-4803-9fda-c2ea19bd9dd4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.480963] env[62923]: DEBUG nova.compute.provider_tree [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.496674] env[62923]: DEBUG nova.network.neutron [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.661070] env[62923]: DEBUG nova.network.neutron [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.744475] env[62923]: INFO nova.compute.manager [-] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Took 1.03 seconds to deallocate network for instance. 
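The traceback that opens this section bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294 in this tree). A minimal sketch of that guard, assuming the standard Neutron port dict shape and the 'binding_failed' vif_type sentinel; this is an illustration of the check the frames show, not the exact upstream source:

from nova import exception

VIF_TYPE_BINDING_FAILED = 'binding_failed'

def _ensure_no_port_binding_failure(port):
    # Neutron accepted the port create/update but could not bind it to
    # a host; surface that as PortBindingFailed so the build gets
    # re-scheduled instead of proceeding with an unusable VIF.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise exception.PortBindingFailed(port_id=port['id'])

This is why every failed boot in this log carries the same "please check neutron logs" message: Nova only sees the failed-binding flag on the port, not the root cause on the Neutron side.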
[ 604.746032] env[62923]: DEBUG nova.compute.claims [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 604.746631] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.854415] env[62923]: DEBUG nova.compute.manager [req-68646d0f-d1d7-4e9c-91b0-81b2357d3644 req-dba76a12-0669-460a-9aae-c510166a1ded service nova] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Received event network-changed-f53dec6a-8c2a-461a-9555-1212dc01ba74 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 604.854510] env[62923]: DEBUG nova.compute.manager [req-68646d0f-d1d7-4e9c-91b0-81b2357d3644 req-dba76a12-0669-460a-9aae-c510166a1ded service nova] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Refreshing instance network info cache due to event network-changed-f53dec6a-8c2a-461a-9555-1212dc01ba74. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 604.854645] env[62923]: DEBUG oslo_concurrency.lockutils [req-68646d0f-d1d7-4e9c-91b0-81b2357d3644 req-dba76a12-0669-460a-9aae-c510166a1ded service nova] Acquiring lock "refresh_cache-03ee0097-1200-43ce-9baa-e9da80105516" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.885716] env[62923]: DEBUG oslo_concurrency.lockutils [req-4cc83444-8f8a-4048-957c-39efee1d492b req-7b5e11a3-00c4-4e4f-99c1-a48324c7c1dd service nova] Releasing lock "refresh_cache-353b72b2-cd56-442f-9010-c75baf8f5a48" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.886279] env[62923]: DEBUG nova.compute.manager [req-4cc83444-8f8a-4048-957c-39efee1d492b req-7b5e11a3-00c4-4e4f-99c1-a48324c7c1dd service nova] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Received event network-vif-deleted-1221c65a-3610-477c-97f6-0202d2be27cb {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 604.987429] env[62923]: DEBUG nova.scheduler.client.report [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 605.165677] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Releasing lock 
"refresh_cache-03ee0097-1200-43ce-9baa-e9da80105516" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.166930] env[62923]: DEBUG nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 605.166930] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 605.166930] env[62923]: DEBUG oslo_concurrency.lockutils [req-68646d0f-d1d7-4e9c-91b0-81b2357d3644 req-dba76a12-0669-460a-9aae-c510166a1ded service nova] Acquired lock "refresh_cache-03ee0097-1200-43ce-9baa-e9da80105516" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.166930] env[62923]: DEBUG nova.network.neutron [req-68646d0f-d1d7-4e9c-91b0-81b2357d3644 req-dba76a12-0669-460a-9aae-c510166a1ded service nova] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Refreshing network info cache for port f53dec6a-8c2a-461a-9555-1212dc01ba74 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 605.167827] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5fdcab43-9aa7-4fe3-b19e-28bbcbe9a93b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.184640] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748474c8-6620-4ba6-86e4-aecfa2161732 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.212429] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 03ee0097-1200-43ce-9baa-e9da80105516 could not be found. [ 605.213660] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 605.213893] env[62923]: INFO nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 605.215173] env[62923]: DEBUG oslo.service.loopingcall [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 605.215173] env[62923]: DEBUG nova.compute.manager [-] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 605.215173] env[62923]: DEBUG nova.network.neutron [-] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 605.237297] env[62923]: DEBUG nova.network.neutron [-] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.464349] env[62923]: DEBUG nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 605.491984] env[62923]: DEBUG nova.virt.hardware [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 605.492403] env[62923]: DEBUG nova.virt.hardware [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 605.492625] env[62923]: DEBUG nova.virt.hardware [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.492871] env[62923]: DEBUG nova.virt.hardware [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 605.493079] env[62923]: DEBUG nova.virt.hardware 
[None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 605.493339] env[62923]: DEBUG nova.virt.hardware [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 605.493569] env[62923]: DEBUG nova.virt.hardware [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 605.493728] env[62923]: DEBUG nova.virt.hardware [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 605.493898] env[62923]: DEBUG nova.virt.hardware [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 605.494069] env[62923]: DEBUG nova.virt.hardware [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 605.494241] env[62923]: DEBUG nova.virt.hardware [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 605.501017] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d658b0-913b-4d07-8aae-8f370bedca1a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.502720] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.083s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.503392] env[62923]: ERROR nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 11d62c03-b355-4dd9-8985-0a6ebcf8d0d0, please check neutron logs for more information. 
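The nova.virt.hardware lines above walk the CPU topology search for the m1.nano flavor: with no flavor or image limits set, the maximums default to 65536 sockets/cores/threads, and for 1 vCPU the only (sockets, cores, threads) factorization is 1:1:1. A small sketch of that enumeration, assuming the simple product rule the log implies (not the actual nova.virt.hardware code):

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Enumerate (sockets, cores, threads) triples whose product is
    # exactly the vCPU count and which respect each maximum.
    results = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, max_cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= max_threads:
                results.append((sockets, cores, threads))
    return results

print(possible_topologies(1, 65536, 65536, 65536))   # [(1, 1, 1)]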
[ 605.503392] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Traceback (most recent call last): [ 605.503392] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 605.503392] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] self.driver.spawn(context, instance, image_meta, [ 605.503392] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 605.503392] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] self._vmops.spawn(context, instance, image_meta, injected_files, [ 605.503392] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 605.503392] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] vm_ref = self.build_virtual_machine(instance, [ 605.503392] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 605.503392] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] vif_infos = vmwarevif.get_vif_info(self._session, [ 605.503392] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 605.503666] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] for vif in network_info: [ 605.503666] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 605.503666] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] return self._sync_wrapper(fn, *args, **kwargs) [ 605.503666] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 605.503666] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] self.wait() [ 605.503666] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 605.503666] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] self[:] = self._gt.wait() [ 605.503666] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 605.503666] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] return self._exit_event.wait() [ 605.503666] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 605.503666] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] result = hub.switch() [ 605.503666] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
605.503666] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] return self.greenlet.switch() [ 605.504091] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.504091] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] result = function(*args, **kwargs) [ 605.504091] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 605.504091] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] return func(*args, **kwargs) [ 605.504091] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 605.504091] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] raise e [ 605.504091] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 605.504091] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] nwinfo = self.network_api.allocate_for_instance( [ 605.504091] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.504091] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] created_port_ids = self._update_ports_for_instance( [ 605.504091] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.504091] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] with excutils.save_and_reraise_exception(): [ 605.504091] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.504414] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] self.force_reraise() [ 605.504414] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.504414] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] raise self.value [ 605.504414] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.504414] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] updated_port = self._update_port( [ 605.504414] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.504414] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] _ensure_no_port_binding_failure(port) [ 605.504414] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 605.504414] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] raise exception.PortBindingFailed(port_id=port['id']) [ 605.504414] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] nova.exception.PortBindingFailed: Binding failed for port 11d62c03-b355-4dd9-8985-0a6ebcf8d0d0, please check neutron logs for more information. [ 605.504414] env[62923]: ERROR nova.compute.manager [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] [ 605.504635] env[62923]: DEBUG nova.compute.utils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Binding failed for port 11d62c03-b355-4dd9-8985-0a6ebcf8d0d0, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 605.505427] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.823s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.508942] env[62923]: DEBUG nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Build of instance 6f5cc707-70e8-48fb-8d9d-904f3e0130af was re-scheduled: Binding failed for port 11d62c03-b355-4dd9-8985-0a6ebcf8d0d0, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 605.509395] env[62923]: DEBUG nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 605.509617] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Acquiring lock "refresh_cache-6f5cc707-70e8-48fb-8d9d-904f3e0130af" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.509761] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Acquired lock "refresh_cache-6f5cc707-70e8-48fb-8d9d-904f3e0130af" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.509990] env[62923]: DEBUG nova.network.neutron [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.516537] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259dc250-f010-48a4-821e-733098aa32d3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.695589] env[62923]: DEBUG nova.network.neutron [req-68646d0f-d1d7-4e9c-91b0-81b2357d3644 req-dba76a12-0669-460a-9aae-c510166a1ded service nova] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.741639] env[62923]: DEBUG nova.network.neutron [-] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.866789] env[62923]: DEBUG nova.network.neutron [req-68646d0f-d1d7-4e9c-91b0-81b2357d3644 req-dba76a12-0669-460a-9aae-c510166a1ded service nova] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.019457] env[62923]: ERROR nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 648493d0-da21-4752-941b-c298fb95d7e4, please check neutron logs for more information. 
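Every traceback in this section passes through oslo_utils.excutils.save_and_reraise_exception() (the excutils.py:227/200 frames). That context manager is real oslo.utils API: it captures the in-flight exception, lets cleanup code run, and re-raises the original on exit. A self-contained usage sketch with stub helpers standing in for the port-update logic:

from oslo_utils import excutils

def bind(port):
    raise RuntimeError('binding failed for %s' % port)

def rollback(ports):
    print('rolling back', ports)

def update_ports(ports):
    for port in ports:
        try:
            bind(port)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Runs while the original exception is saved; on exit
                # the manager re-raises it unchanged, which is the
                # 'raise self.value' frame seen in the tracebacks.
                rollback(ports)

This mirrors the _update_ports_for_instance frames above: cleanup happens first, then the original PortBindingFailed propagates up to _allocate_network_async and, from there, into the re-schedule path.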
[ 606.019457] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 606.019457] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 606.019457] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 606.019457] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 606.019457] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 606.019457] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 606.019457] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 606.019457] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.019457] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 606.019457] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.019457] env[62923]: ERROR nova.compute.manager raise self.value [ 606.019457] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 606.019457] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 606.019457] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.019457] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 606.019862] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.019862] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 606.019862] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 648493d0-da21-4752-941b-c298fb95d7e4, please check neutron logs for more information. 
[ 606.019862] env[62923]: ERROR nova.compute.manager [ 606.019862] env[62923]: Traceback (most recent call last): [ 606.019862] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 606.019862] env[62923]: listener.cb(fileno) [ 606.019862] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 606.019862] env[62923]: result = function(*args, **kwargs) [ 606.019862] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 606.019862] env[62923]: return func(*args, **kwargs) [ 606.019862] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 606.019862] env[62923]: raise e [ 606.019862] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 606.019862] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 606.019862] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 606.019862] env[62923]: created_port_ids = self._update_ports_for_instance( [ 606.019862] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 606.019862] env[62923]: with excutils.save_and_reraise_exception(): [ 606.019862] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.019862] env[62923]: self.force_reraise() [ 606.019862] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.019862] env[62923]: raise self.value [ 606.019862] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 606.019862] env[62923]: updated_port = self._update_port( [ 606.019862] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.019862] env[62923]: _ensure_no_port_binding_failure(port) [ 606.019862] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.019862] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 606.020461] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 648493d0-da21-4752-941b-c298fb95d7e4, please check neutron logs for more information. [ 606.020461] env[62923]: Removing descriptor: 17 [ 606.020461] env[62923]: ERROR nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 648493d0-da21-4752-941b-c298fb95d7e4, please check neutron logs for more information. 
[ 606.020461] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Traceback (most recent call last): [ 606.020461] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 606.020461] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] yield resources [ 606.020461] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 606.020461] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] self.driver.spawn(context, instance, image_meta, [ 606.020461] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 606.020461] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 606.020461] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 606.020461] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] vm_ref = self.build_virtual_machine(instance, [ 606.020706] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 606.020706] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] vif_infos = vmwarevif.get_vif_info(self._session, [ 606.020706] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 606.020706] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] for vif in network_info: [ 606.020706] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 606.020706] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] return self._sync_wrapper(fn, *args, **kwargs) [ 606.020706] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 606.020706] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] self.wait() [ 606.020706] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 606.020706] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] self[:] = self._gt.wait() [ 606.020706] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 606.020706] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] return self._exit_event.wait() [ 606.020706] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 606.020965] env[62923]: ERROR 
nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] result = hub.switch() [ 606.020965] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 606.020965] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] return self.greenlet.switch() [ 606.020965] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 606.020965] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] result = function(*args, **kwargs) [ 606.020965] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 606.020965] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] return func(*args, **kwargs) [ 606.020965] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 606.020965] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] raise e [ 606.020965] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 606.020965] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] nwinfo = self.network_api.allocate_for_instance( [ 606.020965] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 606.020965] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] created_port_ids = self._update_ports_for_instance( [ 606.021242] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 606.021242] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] with excutils.save_and_reraise_exception(): [ 606.021242] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.021242] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] self.force_reraise() [ 606.021242] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 606.021242] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] raise self.value [ 606.021242] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 606.021242] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] updated_port = self._update_port( [ 606.021242] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.021242] 
env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] _ensure_no_port_binding_failure(port) [ 606.021242] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.021242] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] raise exception.PortBindingFailed(port_id=port['id']) [ 606.021480] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] nova.exception.PortBindingFailed: Binding failed for port 648493d0-da21-4752-941b-c298fb95d7e4, please check neutron logs for more information. [ 606.021480] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] [ 606.021480] env[62923]: INFO nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Terminating instance [ 606.023011] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "refresh_cache-0dba167b-aa56-4463-9749-b74fbc7430d9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.023179] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "refresh_cache-0dba167b-aa56-4463-9749-b74fbc7430d9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.023344] env[62923]: DEBUG nova.network.neutron [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.048111] env[62923]: DEBUG nova.network.neutron [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.151626] env[62923]: DEBUG nova.network.neutron [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.203927] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquiring lock "c370e9a9-3c09-418c-b2fc-e75323298518" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.204276] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Lock "c370e9a9-3c09-418c-b2fc-e75323298518" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.247057] env[62923]: INFO nova.compute.manager [-] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Took 1.03 seconds to deallocate network for instance. [ 606.250384] env[62923]: DEBUG nova.compute.claims [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 606.250594] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.368689] env[62923]: DEBUG oslo_concurrency.lockutils [req-68646d0f-d1d7-4e9c-91b0-81b2357d3644 req-dba76a12-0669-460a-9aae-c510166a1ded service nova] Releasing lock "refresh_cache-03ee0097-1200-43ce-9baa-e9da80105516" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.369333] env[62923]: DEBUG nova.compute.manager [req-68646d0f-d1d7-4e9c-91b0-81b2357d3644 req-dba76a12-0669-460a-9aae-c510166a1ded service nova] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Received event network-vif-deleted-f53dec6a-8c2a-461a-9555-1212dc01ba74 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 606.403817] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3ec113-2c38-44a3-9c37-daca903b80c7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.413916] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bdee5c-0016-4686-b9d1-1531d26b9691 {{(pid=62923) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.452413] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8edd5c9b-e6a9-4d4c-a8b9-e7f83ac35bbe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.463086] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cdb9dd6-f7d2-4a8b-9332-81a5fb7c72e4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.478523] env[62923]: DEBUG nova.compute.provider_tree [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.541125] env[62923]: DEBUG nova.network.neutron [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.654656] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Releasing lock "refresh_cache-6f5cc707-70e8-48fb-8d9d-904f3e0130af" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.654910] env[62923]: DEBUG nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 606.655077] env[62923]: DEBUG nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 606.655329] env[62923]: DEBUG nova.network.neutron [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 606.663159] env[62923]: DEBUG nova.network.neutron [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.688310] env[62923]: DEBUG nova.network.neutron [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.876292] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquiring lock "f853c572-ad40-4cce-83d4-d5f11b42c37f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.876524] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Lock "f853c572-ad40-4cce-83d4-d5f11b42c37f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.982766] env[62923]: DEBUG nova.scheduler.client.report [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 607.047585] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Acquiring lock 
"b48be393-189f-4093-b079-fe555192e7ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.047822] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Lock "b48be393-189f-4093-b079-fe555192e7ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.100247] env[62923]: DEBUG nova.compute.manager [req-edd5e4bd-2cf7-4e06-888b-ae3716bfc7ff req-737455cc-691f-46ec-96cf-63694445b905 service nova] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Received event network-changed-648493d0-da21-4752-941b-c298fb95d7e4 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 607.100463] env[62923]: DEBUG nova.compute.manager [req-edd5e4bd-2cf7-4e06-888b-ae3716bfc7ff req-737455cc-691f-46ec-96cf-63694445b905 service nova] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Refreshing instance network info cache due to event network-changed-648493d0-da21-4752-941b-c298fb95d7e4. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 607.100620] env[62923]: DEBUG oslo_concurrency.lockutils [req-edd5e4bd-2cf7-4e06-888b-ae3716bfc7ff req-737455cc-691f-46ec-96cf-63694445b905 service nova] Acquiring lock "refresh_cache-0dba167b-aa56-4463-9749-b74fbc7430d9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.166441] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "refresh_cache-0dba167b-aa56-4463-9749-b74fbc7430d9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.166441] env[62923]: DEBUG nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 607.166628] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 607.166900] env[62923]: DEBUG oslo_concurrency.lockutils [req-edd5e4bd-2cf7-4e06-888b-ae3716bfc7ff req-737455cc-691f-46ec-96cf-63694445b905 service nova] Acquired lock "refresh_cache-0dba167b-aa56-4463-9749-b74fbc7430d9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.167082] env[62923]: DEBUG nova.network.neutron [req-edd5e4bd-2cf7-4e06-888b-ae3716bfc7ff req-737455cc-691f-46ec-96cf-63694445b905 service nova] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Refreshing network info cache for port 648493d0-da21-4752-941b-c298fb95d7e4 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 607.168155] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32c7d13e-1f08-414b-940d-4730c9e8beb1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.177783] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2ce133-5681-4603-8778-2447566d0187 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.190529] env[62923]: DEBUG nova.network.neutron [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.199315] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0dba167b-aa56-4463-9749-b74fbc7430d9 could not be found. [ 607.199423] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 607.199547] env[62923]: INFO nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Took 0.03 seconds to destroy the instance on the hypervisor. [ 607.199778] env[62923]: DEBUG oslo.service.loopingcall [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 607.200476] env[62923]: DEBUG nova.compute.manager [-] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 607.200574] env[62923]: DEBUG nova.network.neutron [-] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 607.215760] env[62923]: DEBUG nova.network.neutron [-] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.489385] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.984s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.490086] env[62923]: ERROR nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 572b379b-a2f8-4652-b17d-3e90afacb781, please check neutron logs for more information. [ 607.490086] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Traceback (most recent call last): [ 607.490086] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 607.490086] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] self.driver.spawn(context, instance, image_meta, [ 607.490086] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 607.490086] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] self._vmops.spawn(context, instance, image_meta, injected_files, [ 607.490086] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 607.490086] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] vm_ref = self.build_virtual_machine(instance, [ 607.490086] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 607.490086] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] vif_infos = vmwarevif.get_vif_info(self._session, [ 607.490086] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 607.490545] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] for vif in network_info: [ 607.490545] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ 
[ 607.490545] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] return self._sync_wrapper(fn, *args, **kwargs) [ 607.490545] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 607.490545] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] self.wait() [ 607.490545] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 607.490545] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] self[:] = self._gt.wait() [ 607.490545] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 607.490545] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] return self._exit_event.wait() [ 607.490545] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 607.490545] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] result = hub.switch() [ 607.490545] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 607.490545] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] return self.greenlet.switch() [ 607.490946] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.490946] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] result = function(*args, **kwargs) [ 607.490946] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 607.490946] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] return func(*args, **kwargs) [ 607.490946] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 607.490946] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] raise e [ 607.490946] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.490946] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] nwinfo = self.network_api.allocate_for_instance( [ 607.490946] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.490946] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] created_port_ids = self._update_ports_for_instance( [ 607.490946] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.490946] env[62923]: 
ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] with excutils.save_and_reraise_exception(): [ 607.490946] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.491287] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] self.force_reraise() [ 607.491287] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.491287] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] raise self.value [ 607.491287] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.491287] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] updated_port = self._update_port( [ 607.491287] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.491287] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] _ensure_no_port_binding_failure(port) [ 607.491287] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.491287] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] raise exception.PortBindingFailed(port_id=port['id']) [ 607.491287] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] nova.exception.PortBindingFailed: Binding failed for port 572b379b-a2f8-4652-b17d-3e90afacb781, please check neutron logs for more information. [ 607.491287] env[62923]: ERROR nova.compute.manager [instance: 147165a4-9071-4516-9498-fa4c706a5e37] [ 607.491572] env[62923]: DEBUG nova.compute.utils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Binding failed for port 572b379b-a2f8-4652-b17d-3e90afacb781, please check neutron logs for more information. 
{{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 607.492050] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.688s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.495832] env[62923]: INFO nova.compute.claims [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.499035] env[62923]: DEBUG nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Build of instance 147165a4-9071-4516-9498-fa4c706a5e37 was re-scheduled: Binding failed for port 572b379b-a2f8-4652-b17d-3e90afacb781, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 607.499760] env[62923]: DEBUG nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 607.499760] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "refresh_cache-147165a4-9071-4516-9498-fa4c706a5e37" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.499760] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "refresh_cache-147165a4-9071-4516-9498-fa4c706a5e37" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.499760] env[62923]: DEBUG nova.network.neutron [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 607.696585] env[62923]: INFO nova.compute.manager [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] [instance: 6f5cc707-70e8-48fb-8d9d-904f3e0130af] Took 1.04 seconds to deallocate network for instance. [ 607.701608] env[62923]: DEBUG nova.network.neutron [req-edd5e4bd-2cf7-4e06-888b-ae3716bfc7ff req-737455cc-691f-46ec-96cf-63694445b905 service nova] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.718192] env[62923]: DEBUG nova.network.neutron [-] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.813586] env[62923]: DEBUG nova.network.neutron [req-edd5e4bd-2cf7-4e06-888b-ae3716bfc7ff req-737455cc-691f-46ec-96cf-63694445b905 service nova] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.035921] env[62923]: DEBUG nova.network.neutron [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.207940] env[62923]: DEBUG nova.network.neutron [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.221546] env[62923]: INFO nova.compute.manager [-] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Took 1.02 seconds to deallocate network for instance. [ 608.224371] env[62923]: DEBUG nova.compute.claims [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 608.224463] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.316451] env[62923]: DEBUG oslo_concurrency.lockutils [req-edd5e4bd-2cf7-4e06-888b-ae3716bfc7ff req-737455cc-691f-46ec-96cf-63694445b905 service nova] Releasing lock "refresh_cache-0dba167b-aa56-4463-9749-b74fbc7430d9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.316715] env[62923]: DEBUG nova.compute.manager [req-edd5e4bd-2cf7-4e06-888b-ae3716bfc7ff req-737455cc-691f-46ec-96cf-63694445b905 service nova] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Received event network-vif-deleted-648493d0-da21-4752-941b-c298fb95d7e4 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 608.715057] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "refresh_cache-147165a4-9071-4516-9498-fa4c706a5e37" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.715057] env[62923]: DEBUG nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Virt 
driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 608.715057] env[62923]: DEBUG nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 608.715057] env[62923]: DEBUG nova.network.neutron [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 608.739610] env[62923]: DEBUG nova.network.neutron [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.753213] env[62923]: INFO nova.scheduler.client.report [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Deleted allocations for instance 6f5cc707-70e8-48fb-8d9d-904f3e0130af [ 608.963032] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ec7dfc-0101-4ea1-8842-c51a33585d48 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.972067] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0813a3-d845-4985-81db-7dddb75dcfa2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.009438] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6059e501-ac53-4256-8f78-d9efe50a7cae {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.018513] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92536582-8fa3-4735-963f-b53205b7794c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.034250] env[62923]: DEBUG nova.compute.provider_tree [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.242376] env[62923]: DEBUG nova.network.neutron [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.263163] env[62923]: DEBUG oslo_concurrency.lockutils [None req-715e8879-bfa8-4740-8bf2-6cd1dd49984f tempest-ServersAdminNegativeTestJSON-1486091049 
tempest-ServersAdminNegativeTestJSON-1486091049-project-member] Lock "6f5cc707-70e8-48fb-8d9d-904f3e0130af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.010s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.541144] env[62923]: DEBUG nova.scheduler.client.report [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 609.747879] env[62923]: INFO nova.compute.manager [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 147165a4-9071-4516-9498-fa4c706a5e37] Took 1.03 seconds to deallocate network for instance. [ 609.766402] env[62923]: DEBUG nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 610.044734] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.045794] env[62923]: DEBUG nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 610.051856] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.401s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.052621] env[62923]: INFO nova.compute.claims [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 610.180429] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.180482] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.302168] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.558892] env[62923]: DEBUG nova.compute.utils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 610.566030] env[62923]: DEBUG nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 610.566030] env[62923]: DEBUG nova.network.neutron [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 610.650308] env[62923]: DEBUG nova.policy [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '92a5a2e2ea1b4b0a94c1c809f3d99f19', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '49db9f533ba64ec99edee9f12cf2195d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 610.688249] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.688413] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Starting heal instance info cache {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 610.688534] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Rebuilding the list of instances to heal {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 610.796683] env[62923]: INFO nova.scheduler.client.report [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleted allocations for instance 147165a4-9071-4516-9498-fa4c706a5e37 [ 611.065511] env[62923]: DEBUG nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 611.201026] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 611.201026] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 611.201026] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Skipping network cache update for instance because it is Building. 
{{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 611.201026] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 611.201026] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 611.201026] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 611.201242] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 611.201242] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Didn't find any instances for network info cache update. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 611.202672] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.202980] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.205629] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.205629] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.205629] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.205629] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.205629] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62923) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 611.205629] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.309484] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e2104dd1-24c9-4e26-9988-07506efbf3ab tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "147165a4-9071-4516-9498-fa4c706a5e37" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.586s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.498449] env[62923]: DEBUG nova.network.neutron [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Successfully created port: e82cc592-0f33-4197-9751-24e4dfc31e08 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 611.534031] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f5e00c-9ed2-4d26-93f5-44419e9a8351 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.541815] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b971826-688c-4fc9-a36d-1b3b47a6e4f0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.582132] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0670e56-ad97-47c9-bdb8-7cc785efd4ff {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.590463] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0877f23f-b165-4116-b335-6b3f01bd1ba1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.604690] env[62923]: DEBUG nova.compute.provider_tree [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.708408] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.816091] env[62923]: DEBUG nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 612.086228] env[62923]: DEBUG nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 612.107696] env[62923]: DEBUG nova.scheduler.client.report [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 612.131287] env[62923]: DEBUG nova.virt.hardware [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 612.131553] env[62923]: DEBUG nova.virt.hardware [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 612.131699] env[62923]: DEBUG nova.virt.hardware [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.131885] env[62923]: DEBUG nova.virt.hardware [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 612.132035] env[62923]: DEBUG nova.virt.hardware [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Image pref 0:0:0 {{(pid=62923) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.132177] env[62923]: DEBUG nova.virt.hardware [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 612.132373] env[62923]: DEBUG nova.virt.hardware [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 612.132523] env[62923]: DEBUG nova.virt.hardware [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 612.132677] env[62923]: DEBUG nova.virt.hardware [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 612.132832] env[62923]: DEBUG nova.virt.hardware [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 612.133042] env[62923]: DEBUG nova.virt.hardware [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 612.133944] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e762af0-cfc6-42eb-9e28-c5cb6e21f2a4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.146033] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89fa624-a918-4c17-842b-bc53d18908a8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.344630] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.615927] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
:: held 2.565s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.620508] env[62923]: DEBUG nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 612.622480] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.192s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.623863] env[62923]: INFO nova.compute.claims [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.034541] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "a701b2b9-10df-4ba3-8b78-b6b486d8f1db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.034769] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "a701b2b9-10df-4ba3-8b78-b6b486d8f1db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.129511] env[62923]: DEBUG nova.compute.utils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 613.134549] env[62923]: DEBUG nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 613.134549] env[62923]: DEBUG nova.network.neutron [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 613.249585] env[62923]: DEBUG nova.policy [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1c2306873fe24d2db075445aeea97ddd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29ba5bda1975408fb2b2f4691089784d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 613.640414] env[62923]: DEBUG nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 613.990264] env[62923]: DEBUG nova.network.neutron [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Successfully created port: 573f86dc-c22b-42d3-8a61-5022ae6fc36c {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 614.005880] env[62923]: ERROR nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e82cc592-0f33-4197-9751-24e4dfc31e08, please check neutron logs for more information. 
[ 614.005880] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 614.005880] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 614.005880] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 614.005880] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 614.005880] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 614.005880] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 614.005880] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 614.005880] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 614.005880] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 614.005880] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 614.005880] env[62923]: ERROR nova.compute.manager raise self.value [ 614.005880] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 614.005880] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 614.005880] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 614.005880] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 614.006295] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 614.006295] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 614.006295] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e82cc592-0f33-4197-9751-24e4dfc31e08, please check neutron logs for more information. 
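The frames above show _update_port() in nova/network/neutron.py calling _ensure_no_port_binding_failure(port), which raises PortBindingFailed (neutron.py:294 in this run). The body of that check is not included in the log; below is a minimal self-contained sketch of what such a guard plausibly does, assuming Neutron's standard 'binding:vif_type' port attribute and its 'binding_failed' sentinel value (both assumptions here, not taken from the log):

    # Sketch reconstructed from the traceback above; the exact
    # implementation is not shown in this log.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed Neutron sentinel

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            # Message mirrors the log line emitted for this exception.
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Refuse to proceed with a port Neutron reported it could not bind.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

Binding typically fails when no Neutron mechanism driver can wire the port on the target host, which is why the message defers to the neutron logs.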
[ 614.006295] env[62923]: ERROR nova.compute.manager [ 614.006295] env[62923]: Traceback (most recent call last): [ 614.006295] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 614.006295] env[62923]: listener.cb(fileno) [ 614.006295] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 614.006295] env[62923]: result = function(*args, **kwargs) [ 614.006295] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 614.006295] env[62923]: return func(*args, **kwargs) [ 614.006295] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 614.006295] env[62923]: raise e [ 614.006295] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 614.006295] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 614.006295] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 614.006295] env[62923]: created_port_ids = self._update_ports_for_instance( [ 614.006295] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 614.006295] env[62923]: with excutils.save_and_reraise_exception(): [ 614.006295] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 614.006295] env[62923]: self.force_reraise() [ 614.006295] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 614.006295] env[62923]: raise self.value [ 614.006295] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 614.006295] env[62923]: updated_port = self._update_port( [ 614.006295] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 614.006295] env[62923]: _ensure_no_port_binding_failure(port) [ 614.006295] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 614.006295] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 614.006929] env[62923]: nova.exception.PortBindingFailed: Binding failed for port e82cc592-0f33-4197-9751-24e4dfc31e08, please check neutron logs for more information. [ 614.006929] env[62923]: Removing descriptor: 17 [ 614.006929] env[62923]: ERROR nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e82cc592-0f33-4197-9751-24e4dfc31e08, please check neutron logs for more information. 
[ 614.006929] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Traceback (most recent call last): [ 614.006929] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 614.006929] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] yield resources [ 614.006929] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 614.006929] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] self.driver.spawn(context, instance, image_meta, [ 614.006929] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 614.006929] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 614.006929] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 614.006929] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] vm_ref = self.build_virtual_machine(instance, [ 614.007201] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 614.007201] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] vif_infos = vmwarevif.get_vif_info(self._session, [ 614.007201] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 614.007201] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] for vif in network_info: [ 614.007201] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 614.007201] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] return self._sync_wrapper(fn, *args, **kwargs) [ 614.007201] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 614.007201] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] self.wait() [ 614.007201] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 614.007201] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] self[:] = self._gt.wait() [ 614.007201] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 614.007201] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] return self._exit_event.wait() [ 614.007201] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 614.007476] env[62923]: ERROR 
nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] result = hub.switch() [ 614.007476] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 614.007476] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] return self.greenlet.switch() [ 614.007476] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 614.007476] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] result = function(*args, **kwargs) [ 614.007476] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 614.007476] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] return func(*args, **kwargs) [ 614.007476] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 614.007476] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] raise e [ 614.007476] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 614.007476] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] nwinfo = self.network_api.allocate_for_instance( [ 614.007476] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 614.007476] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] created_port_ids = self._update_ports_for_instance( [ 614.007775] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 614.007775] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] with excutils.save_and_reraise_exception(): [ 614.007775] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 614.007775] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] self.force_reraise() [ 614.007775] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 614.007775] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] raise self.value [ 614.007775] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 614.007775] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] updated_port = self._update_port( [ 614.007775] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 614.007775] 
env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] _ensure_no_port_binding_failure(port) [ 614.007775] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 614.007775] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] raise exception.PortBindingFailed(port_id=port['id']) [ 614.008111] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] nova.exception.PortBindingFailed: Binding failed for port e82cc592-0f33-4197-9751-24e4dfc31e08, please check neutron logs for more information. [ 614.008111] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] [ 614.008111] env[62923]: INFO nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Terminating instance [ 614.011222] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Acquiring lock "refresh_cache-386ffe8a-a160-4dea-88e6-529219eaf99f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.011222] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Acquired lock "refresh_cache-386ffe8a-a160-4dea-88e6-529219eaf99f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.011492] env[62923]: DEBUG nova.network.neutron [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 614.208645] env[62923]: DEBUG nova.compute.manager [req-70e3ef64-d005-4511-8fe1-f4c9d0ae42f4 req-9a27f856-1277-44b6-adf6-6918bd3ae0a7 service nova] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Received event network-changed-e82cc592-0f33-4197-9751-24e4dfc31e08 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 614.208916] env[62923]: DEBUG nova.compute.manager [req-70e3ef64-d005-4511-8fe1-f4c9d0ae42f4 req-9a27f856-1277-44b6-adf6-6918bd3ae0a7 service nova] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Refreshing instance network info cache due to event network-changed-e82cc592-0f33-4197-9751-24e4dfc31e08. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 614.209181] env[62923]: DEBUG oslo_concurrency.lockutils [req-70e3ef64-d005-4511-8fe1-f4c9d0ae42f4 req-9a27f856-1277-44b6-adf6-6918bd3ae0a7 service nova] Acquiring lock "refresh_cache-386ffe8a-a160-4dea-88e6-529219eaf99f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.222211] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831a022c-2a4b-4446-8129-675f6a118237 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.234712] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a1cb5d-6c1e-4f46-b0c4-2aaca044292e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.281813] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8593ed0-6bbb-4c36-a054-908dbdec6cc3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.290265] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3301e35-e33a-4b09-878b-c5cb492077be {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.303805] env[62923]: DEBUG nova.compute.provider_tree [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.538347] env[62923]: DEBUG nova.network.neutron [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.662167] env[62923]: DEBUG nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 614.709486] env[62923]: DEBUG nova.virt.hardware [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 614.709728] env[62923]: DEBUG nova.virt.hardware [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 614.709920] env[62923]: DEBUG nova.virt.hardware [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 614.711122] env[62923]: DEBUG nova.virt.hardware [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 614.711373] env[62923]: DEBUG nova.virt.hardware [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 614.712585] env[62923]: DEBUG nova.virt.hardware [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 614.712585] env[62923]: DEBUG nova.virt.hardware [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 614.712585] env[62923]: DEBUG nova.virt.hardware [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies
/opt/stack/nova/nova/virt/hardware.py:471}} [ 614.712585] env[62923]: DEBUG nova.virt.hardware [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 614.712585] env[62923]: DEBUG nova.virt.hardware [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 614.712802] env[62923]: DEBUG nova.virt.hardware [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 614.713383] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37cb4f2-874a-4edc-90d9-0020cd4ab89a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.728959] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1be298-b819-4110-b0bb-da7ae137fcfc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.736049] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19d3d9cd-0b44-409e-a4e4-0e4c73a02172 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "a004026e-ab4e-45b8-b4ab-d517496c9c7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.736287] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19d3d9cd-0b44-409e-a4e4-0e4c73a02172 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "a004026e-ab4e-45b8-b4ab-d517496c9c7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.739009] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Acquiring lock "fcee63c5-eaa3-4d8c-a612-9c30087433e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.739219] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Lock "fcee63c5-eaa3-4d8c-a612-9c30087433e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.807344] env[62923]: DEBUG
nova.scheduler.client.report [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 614.899250] env[62923]: DEBUG nova.network.neutron [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.779803] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.157s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.780955] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Releasing lock "refresh_cache-386ffe8a-a160-4dea-88e6-529219eaf99f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.784070] env[62923]: DEBUG nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 615.784070] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 615.784070] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.629s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.784851] env[62923]: DEBUG oslo_concurrency.lockutils [req-70e3ef64-d005-4511-8fe1-f4c9d0ae42f4 req-9a27f856-1277-44b6-adf6-6918bd3ae0a7 service nova] Acquired lock "refresh_cache-386ffe8a-a160-4dea-88e6-529219eaf99f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.785091] env[62923]: DEBUG nova.network.neutron [req-70e3ef64-d005-4511-8fe1-f4c9d0ae42f4 req-9a27f856-1277-44b6-adf6-6918bd3ae0a7 service nova] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Refreshing network info cache for port e82cc592-0f33-4197-9751-24e4dfc31e08 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 615.786088] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-25ae1e16-dff5-4928-b2ff-fbab09a477bf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.795569] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcddfe0c-837a-4643-953e-e53f4eeaa34e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.818209] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 386ffe8a-a160-4dea-88e6-529219eaf99f could not be found. [ 615.818481] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 615.818661] env[62923]: INFO nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 615.818948] env[62923]: DEBUG oslo.service.loopingcall [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 615.819229] env[62923]: DEBUG nova.compute.manager [-] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 615.819324] env[62923]: DEBUG nova.network.neutron [-] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 615.842063] env[62923]: DEBUG nova.network.neutron [-] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.083432] env[62923]: ERROR nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 573f86dc-c22b-42d3-8a61-5022ae6fc36c, please check neutron logs for more information. [ 616.083432] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 616.083432] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.083432] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 616.083432] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.083432] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 616.083432] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.083432] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 616.083432] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.083432] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 616.083432] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.083432] env[62923]: ERROR nova.compute.manager raise self.value [ 616.083432] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.083432] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 616.083432] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.083432] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 616.084578] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.084578] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 616.084578] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 573f86dc-c22b-42d3-8a61-5022ae6fc36c, please check neutron logs for more information. 
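The bare traceback that follows is the same failure re-surfacing from the eventlet greenthread that ran _allocate_network_async: the spawn path blocks on the asynchronous network_info (the model.py _sync_wrapper/wait frames), and GreenThread.wait() re-raises the worker's exception. A minimal sketch of that pattern, with illustrative names rather than Nova's actual classes:

    import eventlet

    class AsyncNetworkInfo:
        # Illustrative stand-in for the async wrapper seen in the
        # nova/network/model.py frames above; not Nova's actual class.
        def __init__(self, allocate_fn, *args, **kwargs):
            # Allocation runs in a greenthread, as _allocate_network_async does.
            self._gt = eventlet.spawn(allocate_fn, *args, **kwargs)

        def wait(self):
            # GreenThread.wait() returns the worker's result or re-raises its
            # exception; that is why PortBindingFailed appears once from the
            # allocation thread and again inside driver.spawn().
            return self._gt.wait()

This design lets Nova overlap port creation with block-device and hypervisor preparation, at the cost of the failure only surfacing when the network info is first consumed.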
[ 616.084578] env[62923]: ERROR nova.compute.manager [ 616.084578] env[62923]: Traceback (most recent call last): [ 616.084578] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 616.084578] env[62923]: listener.cb(fileno) [ 616.084578] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 616.084578] env[62923]: result = function(*args, **kwargs) [ 616.084578] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 616.084578] env[62923]: return func(*args, **kwargs) [ 616.084578] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 616.084578] env[62923]: raise e [ 616.084578] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.084578] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 616.084578] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.084578] env[62923]: created_port_ids = self._update_ports_for_instance( [ 616.084578] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.084578] env[62923]: with excutils.save_and_reraise_exception(): [ 616.084578] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.084578] env[62923]: self.force_reraise() [ 616.084578] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.084578] env[62923]: raise self.value [ 616.084578] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.084578] env[62923]: updated_port = self._update_port( [ 616.084578] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.084578] env[62923]: _ensure_no_port_binding_failure(port) [ 616.084578] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.084578] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 616.085300] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 573f86dc-c22b-42d3-8a61-5022ae6fc36c, please check neutron logs for more information. [ 616.085300] env[62923]: Removing descriptor: 21 [ 616.085300] env[62923]: ERROR nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 573f86dc-c22b-42d3-8a61-5022ae6fc36c, please check neutron logs for more information. 
[ 616.085300] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Traceback (most recent call last): [ 616.085300] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 616.085300] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] yield resources [ 616.085300] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 616.085300] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] self.driver.spawn(context, instance, image_meta, [ 616.085300] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 616.085300] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 616.085300] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 616.085300] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] vm_ref = self.build_virtual_machine(instance, [ 616.085564] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 616.085564] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] vif_infos = vmwarevif.get_vif_info(self._session, [ 616.085564] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 616.085564] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] for vif in network_info: [ 616.085564] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 616.085564] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] return self._sync_wrapper(fn, *args, **kwargs) [ 616.085564] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 616.085564] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] self.wait() [ 616.085564] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 616.085564] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] self[:] = self._gt.wait() [ 616.085564] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 616.085564] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] return self._exit_event.wait() [ 616.085564] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 616.085852] env[62923]: ERROR 
nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] result = hub.switch() [ 616.085852] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 616.085852] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] return self.greenlet.switch() [ 616.085852] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 616.085852] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] result = function(*args, **kwargs) [ 616.085852] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 616.085852] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] return func(*args, **kwargs) [ 616.085852] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 616.085852] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] raise e [ 616.085852] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.085852] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] nwinfo = self.network_api.allocate_for_instance( [ 616.085852] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.085852] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] created_port_ids = self._update_ports_for_instance( [ 616.086148] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.086148] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] with excutils.save_and_reraise_exception(): [ 616.086148] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.086148] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] self.force_reraise() [ 616.086148] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.086148] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] raise self.value [ 616.086148] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.086148] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] updated_port = self._update_port( [ 616.086148] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.086148] 
env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] _ensure_no_port_binding_failure(port) [ 616.086148] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.086148] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] raise exception.PortBindingFailed(port_id=port['id']) [ 616.086422] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] nova.exception.PortBindingFailed: Binding failed for port 573f86dc-c22b-42d3-8a61-5022ae6fc36c, please check neutron logs for more information. [ 616.086422] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] [ 616.086422] env[62923]: INFO nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Terminating instance [ 616.086422] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquiring lock "refresh_cache-a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.086582] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquired lock "refresh_cache-a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.086738] env[62923]: DEBUG nova.network.neutron [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 616.234642] env[62923]: DEBUG nova.compute.manager [req-ed2c5de1-118a-4a2e-b65a-525940183992 req-63ee91a6-1599-438b-93f0-eb079831cd04 service nova] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Received event network-vif-deleted-e82cc592-0f33-4197-9751-24e4dfc31e08 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 616.234642] env[62923]: DEBUG nova.compute.manager [req-ed2c5de1-118a-4a2e-b65a-525940183992 req-63ee91a6-1599-438b-93f0-eb079831cd04 service nova] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Received event network-changed-573f86dc-c22b-42d3-8a61-5022ae6fc36c {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 616.234642] env[62923]: DEBUG nova.compute.manager [req-ed2c5de1-118a-4a2e-b65a-525940183992 req-63ee91a6-1599-438b-93f0-eb079831cd04 service nova] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Refreshing instance network info cache due to event network-changed-573f86dc-c22b-42d3-8a61-5022ae6fc36c. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 616.234642] env[62923]: DEBUG oslo_concurrency.lockutils [req-ed2c5de1-118a-4a2e-b65a-525940183992 req-63ee91a6-1599-438b-93f0-eb079831cd04 service nova] Acquiring lock "refresh_cache-a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.283909] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Acquiring lock "5fcd4d38-9aad-496c-9e25-7489c6bc9095" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.283909] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Lock "5fcd4d38-9aad-496c-9e25-7489c6bc9095" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.316422] env[62923]: DEBUG nova.network.neutron [req-70e3ef64-d005-4511-8fe1-f4c9d0ae42f4 req-9a27f856-1277-44b6-adf6-6918bd3ae0a7 service nova] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.348144] env[62923]: DEBUG nova.network.neutron [-] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.436379] env[62923]: DEBUG nova.network.neutron [req-70e3ef64-d005-4511-8fe1-f4c9d0ae42f4 req-9a27f856-1277-44b6-adf6-6918bd3ae0a7 service nova] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.469122] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "9b334c72-11f5-4165-a350-09fe5487a9a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.469529] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "9b334c72-11f5-4165-a350-09fe5487a9a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.605174] env[62923]: DEBUG nova.network.neutron [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Instance cache missing network info.
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 616.647382] env[62923]: DEBUG nova.network.neutron [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.680451] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9d761d-e89e-488b-895c-9e285e4068a6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.689223] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3f02e9-8ef9-42d3-9a5b-cfbd59fef372 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.719857] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5659843-8f17-4707-9d16-d19a0a3f6976 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.727144] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a5a9f3-2854-4a18-b564-dced16ad85cb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.740241] env[62923]: DEBUG nova.compute.provider_tree [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.786040] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Lock "5fcd4d38-9aad-496c-9e25-7489c6bc9095" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.502s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.786304] env[62923]: DEBUG nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 616.852042] env[62923]: INFO nova.compute.manager [-] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Took 1.03 seconds to deallocate network for instance.
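The claim abort that follows, like the instance_claim at the top of this section, serializes on the "compute_resources" lock; the 'acquired ... waited' and '"released" ... held' lines with timings are emitted by oslo.concurrency's synchronized wrapper (the inner/lockutils.py:402/407/421 locations throughout this log). A minimal sketch of the pattern, not Nova's exact decorator stack:

    from oslo_concurrency import lockutils

    class ResourceTracker:
        # Illustrative only; shows the lock name visible in the log.
        @lockutils.synchronized('compute_resources')
        def abort_instance_claim(self, context, instance, nodename):
            # Runs serialized against instance_claim() and friends, so
            # resource rollback never races a concurrent claim.
            ...  # roll back the resources claimed for the failed build

The 14.192s and 16.629s waits visible earlier in this section are that serialization at work: many concurrent builds and aborts queue on the single per-process lock.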
[ 616.854399] env[62923]: DEBUG nova.compute.claims [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 616.854574] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.939746] env[62923]: DEBUG oslo_concurrency.lockutils [req-70e3ef64-d005-4511-8fe1-f4c9d0ae42f4 req-9a27f856-1277-44b6-adf6-6918bd3ae0a7 service nova] Releasing lock "refresh_cache-386ffe8a-a160-4dea-88e6-529219eaf99f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.152285] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Releasing lock "refresh_cache-a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.152496] env[62923]: DEBUG nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 617.152687] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 617.153024] env[62923]: DEBUG oslo_concurrency.lockutils [req-ed2c5de1-118a-4a2e-b65a-525940183992 req-63ee91a6-1599-438b-93f0-eb079831cd04 service nova] Acquired lock "refresh_cache-a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.153216] env[62923]: DEBUG nova.network.neutron [req-ed2c5de1-118a-4a2e-b65a-525940183992 req-63ee91a6-1599-438b-93f0-eb079831cd04 service nova] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Refreshing network info cache for port 573f86dc-c22b-42d3-8a61-5022ae6fc36c {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 617.154306] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a681e43f-a477-4e3a-b43b-06c6189479d9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.164270] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a407a7eb-a166-4653-8248-f0be906a31fc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.184942] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1 could not be found. [ 617.185176] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 617.185359] env[62923]: INFO nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Took 0.03 seconds to destroy the instance on the hypervisor. [ 617.185585] env[62923]: DEBUG oslo.service.loopingcall [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 617.185781] env[62923]: DEBUG nova.compute.manager [-] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 617.185875] env[62923]: DEBUG nova.network.neutron [-] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 617.209287] env[62923]: DEBUG nova.network.neutron [-] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.245029] env[62923]: DEBUG nova.scheduler.client.report [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 617.290938] env[62923]: DEBUG nova.compute.utils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 617.293033] env[62923]: DEBUG nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 617.293258] env[62923]: DEBUG nova.network.neutron [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 617.346231] env[62923]: DEBUG nova.policy [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96630105912a494a88aae403f199a86c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7848d04b15b14cc4ae623b2d20cd2a7f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 617.678040] env[62923]: DEBUG nova.network.neutron [req-ed2c5de1-118a-4a2e-b65a-525940183992 req-63ee91a6-1599-438b-93f0-eb079831cd04 service nova] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.711869] env[62923]: DEBUG nova.network.neutron [-] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.741098] env[62923]: DEBUG nova.network.neutron [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Successfully created port: b5de7c52-b6f9-458d-ad88-8dc8b5193462 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 617.749745] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.968s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.750343] env[62923]: ERROR nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9d162aad-0e04-43ff-85c9-d731e7490d5d, please check neutron logs for more information. [ 617.750343] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Traceback (most recent call last): [ 617.750343] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 617.750343] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] self.driver.spawn(context, instance, image_meta, [ 617.750343] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 617.750343] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] self._vmops.spawn(context, instance, image_meta, injected_files, [ 617.750343] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 617.750343] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] vm_ref = self.build_virtual_machine(instance, [ 617.750343] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 617.750343] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] vif_infos = vmwarevif.get_vif_info(self._session, [ 617.750343] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 617.750846] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] for vif in network_info: [ 617.750846] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 
617.750846] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] return self._sync_wrapper(fn, *args, **kwargs) [ 617.750846] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 617.750846] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] self.wait() [ 617.750846] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 617.750846] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] self[:] = self._gt.wait() [ 617.750846] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 617.750846] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] return self._exit_event.wait() [ 617.750846] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 617.750846] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] result = hub.switch() [ 617.750846] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 617.750846] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] return self.greenlet.switch() [ 617.751422] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 617.751422] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] result = function(*args, **kwargs) [ 617.751422] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 617.751422] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] return func(*args, **kwargs) [ 617.751422] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 617.751422] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] raise e [ 617.751422] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 617.751422] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] nwinfo = self.network_api.allocate_for_instance( [ 617.751422] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 617.751422] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] created_port_ids = self._update_ports_for_instance( [ 617.751422] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 617.751422] env[62923]: 
ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] with excutils.save_and_reraise_exception(): [ 617.751422] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.751909] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] self.force_reraise() [ 617.751909] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.751909] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] raise self.value [ 617.751909] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 617.751909] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] updated_port = self._update_port( [ 617.751909] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.751909] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] _ensure_no_port_binding_failure(port) [ 617.751909] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.751909] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] raise exception.PortBindingFailed(port_id=port['id']) [ 617.751909] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] nova.exception.PortBindingFailed: Binding failed for port 9d162aad-0e04-43ff-85c9-d731e7490d5d, please check neutron logs for more information. [ 617.751909] env[62923]: ERROR nova.compute.manager [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] [ 617.752198] env[62923]: DEBUG nova.compute.utils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Binding failed for port 9d162aad-0e04-43ff-85c9-d731e7490d5d, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 617.752671] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.680s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.755738] env[62923]: DEBUG nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Build of instance 2a8c7285-35dd-4112-b84a-ea384aead074 was re-scheduled: Binding failed for port 9d162aad-0e04-43ff-85c9-d731e7490d5d, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 617.756838] env[62923]: DEBUG nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 617.756838] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Acquiring lock "refresh_cache-2a8c7285-35dd-4112-b84a-ea384aead074" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.756838] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Acquired lock "refresh_cache-2a8c7285-35dd-4112-b84a-ea384aead074" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.756838] env[62923]: DEBUG nova.network.neutron [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 617.780299] env[62923]: DEBUG nova.network.neutron [req-ed2c5de1-118a-4a2e-b65a-525940183992 req-63ee91a6-1599-438b-93f0-eb079831cd04 service nova] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.795073] env[62923]: DEBUG nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 618.214714] env[62923]: INFO nova.compute.manager [-] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Took 1.03 seconds to deallocate network for instance. [ 618.217495] env[62923]: DEBUG nova.compute.claims [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 618.217668] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.279854] env[62923]: DEBUG nova.network.neutron [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.281797] env[62923]: DEBUG oslo_concurrency.lockutils [req-ed2c5de1-118a-4a2e-b65a-525940183992 req-63ee91a6-1599-438b-93f0-eb079831cd04 service nova] Releasing lock "refresh_cache-a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.282053] env[62923]: DEBUG nova.compute.manager [req-ed2c5de1-118a-4a2e-b65a-525940183992 req-63ee91a6-1599-438b-93f0-eb079831cd04 service nova] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Received event network-vif-deleted-573f86dc-c22b-42d3-8a61-5022ae6fc36c {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 618.441517] env[62923]: DEBUG nova.network.neutron [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.705755] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56310761-09cb-42dd-abf0-59bc62cb3b15 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.713508] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04aaa52-6007-47e1-b456-ad22632c0454 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.745097] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd522e9b-b914-44bf-9b29-3acafb59b967 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.752200] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe9dcae-5469-4b6c-a588-c35c99607a26 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.765127] env[62923]: DEBUG nova.compute.provider_tree [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.804867] env[62923]: DEBUG nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 618.807547] env[62923]: DEBUG nova.compute.manager [req-bc52a59f-d0ad-4a75-9a9b-a4d748c9a6cd req-d1d3d038-9a2a-45d0-a25a-dbdf7116bc24 service nova] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Received event network-changed-b5de7c52-b6f9-458d-ad88-8dc8b5193462 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 618.807761] env[62923]: DEBUG nova.compute.manager [req-bc52a59f-d0ad-4a75-9a9b-a4d748c9a6cd req-d1d3d038-9a2a-45d0-a25a-dbdf7116bc24 service nova] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Refreshing instance network info cache due to event network-changed-b5de7c52-b6f9-458d-ad88-8dc8b5193462. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 618.808042] env[62923]: DEBUG oslo_concurrency.lockutils [req-bc52a59f-d0ad-4a75-9a9b-a4d748c9a6cd req-d1d3d038-9a2a-45d0-a25a-dbdf7116bc24 service nova] Acquiring lock "refresh_cache-5a825d36-7563-4792-8f68-8814ec96cfde" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.808128] env[62923]: DEBUG oslo_concurrency.lockutils [req-bc52a59f-d0ad-4a75-9a9b-a4d748c9a6cd req-d1d3d038-9a2a-45d0-a25a-dbdf7116bc24 service nova] Acquired lock "refresh_cache-5a825d36-7563-4792-8f68-8814ec96cfde" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.808274] env[62923]: DEBUG nova.network.neutron [req-bc52a59f-d0ad-4a75-9a9b-a4d748c9a6cd req-d1d3d038-9a2a-45d0-a25a-dbdf7116bc24 service nova] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Refreshing network info cache for port b5de7c52-b6f9-458d-ad88-8dc8b5193462 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 618.832964] env[62923]: DEBUG nova.virt.hardware [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 618.833240] env[62923]: DEBUG nova.virt.hardware [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 618.833406] env[62923]: DEBUG nova.virt.hardware [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.833584] env[62923]: DEBUG 
nova.virt.hardware [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 618.833726] env[62923]: DEBUG nova.virt.hardware [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.833866] env[62923]: DEBUG nova.virt.hardware [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 618.834598] env[62923]: DEBUG nova.virt.hardware [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 618.834839] env[62923]: DEBUG nova.virt.hardware [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 618.834964] env[62923]: DEBUG nova.virt.hardware [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 618.835180] env[62923]: DEBUG nova.virt.hardware [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 618.835375] env[62923]: DEBUG nova.virt.hardware [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 618.836439] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2049f7-ff44-470d-8fe0-edf020ef2d75 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.844868] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748e797f-d0b2-43ec-85a6-52eb6831c949 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.948019] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Releasing lock "refresh_cache-2a8c7285-35dd-4112-b84a-ea384aead074" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
618.948019] env[62923]: DEBUG nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 618.948019] env[62923]: DEBUG nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 618.948239] env[62923]: DEBUG nova.network.neutron [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 618.965266] env[62923]: DEBUG nova.network.neutron [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.985738] env[62923]: ERROR nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b5de7c52-b6f9-458d-ad88-8dc8b5193462, please check neutron logs for more information. 
[ 618.985738] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 618.985738] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.985738] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 618.985738] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 618.985738] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 618.985738] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 618.985738] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 618.985738] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.985738] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 618.985738] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.985738] env[62923]: ERROR nova.compute.manager raise self.value [ 618.985738] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 618.985738] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 618.985738] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.985738] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 618.986191] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.986191] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 618.986191] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b5de7c52-b6f9-458d-ad88-8dc8b5193462, please check neutron logs for more information. 
[ 618.986191] env[62923]: ERROR nova.compute.manager [ 618.986191] env[62923]: Traceback (most recent call last): [ 618.986191] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 618.986191] env[62923]: listener.cb(fileno) [ 618.986191] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.986191] env[62923]: result = function(*args, **kwargs) [ 618.986191] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 618.986191] env[62923]: return func(*args, **kwargs) [ 618.986191] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 618.986191] env[62923]: raise e [ 618.986191] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.986191] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 618.986191] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 618.986191] env[62923]: created_port_ids = self._update_ports_for_instance( [ 618.986191] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 618.986191] env[62923]: with excutils.save_and_reraise_exception(): [ 618.986191] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.986191] env[62923]: self.force_reraise() [ 618.986191] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.986191] env[62923]: raise self.value [ 618.986191] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 618.986191] env[62923]: updated_port = self._update_port( [ 618.986191] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.986191] env[62923]: _ensure_no_port_binding_failure(port) [ 618.986191] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.986191] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 618.986945] env[62923]: nova.exception.PortBindingFailed: Binding failed for port b5de7c52-b6f9-458d-ad88-8dc8b5193462, please check neutron logs for more information. [ 618.986945] env[62923]: Removing descriptor: 21 [ 618.986945] env[62923]: ERROR nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b5de7c52-b6f9-458d-ad88-8dc8b5193462, please check neutron logs for more information. 
[ 618.986945] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Traceback (most recent call last): [ 618.986945] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 618.986945] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] yield resources [ 618.986945] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 618.986945] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] self.driver.spawn(context, instance, image_meta, [ 618.986945] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 618.986945] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] self._vmops.spawn(context, instance, image_meta, injected_files, [ 618.986945] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 618.986945] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] vm_ref = self.build_virtual_machine(instance, [ 618.987286] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 618.987286] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] vif_infos = vmwarevif.get_vif_info(self._session, [ 618.987286] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 618.987286] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] for vif in network_info: [ 618.987286] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 618.987286] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] return self._sync_wrapper(fn, *args, **kwargs) [ 618.987286] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 618.987286] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] self.wait() [ 618.987286] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 618.987286] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] self[:] = self._gt.wait() [ 618.987286] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 618.987286] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] return self._exit_event.wait() [ 618.987286] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 618.987618] env[62923]: ERROR 
nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] result = hub.switch() [ 618.987618] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 618.987618] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] return self.greenlet.switch() [ 618.987618] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.987618] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] result = function(*args, **kwargs) [ 618.987618] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 618.987618] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] return func(*args, **kwargs) [ 618.987618] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 618.987618] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] raise e [ 618.987618] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.987618] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] nwinfo = self.network_api.allocate_for_instance( [ 618.987618] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 618.987618] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] created_port_ids = self._update_ports_for_instance( [ 618.987953] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 618.987953] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] with excutils.save_and_reraise_exception(): [ 618.987953] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.987953] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] self.force_reraise() [ 618.987953] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.987953] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] raise self.value [ 618.987953] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 618.987953] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] updated_port = self._update_port( [ 618.987953] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.987953] 
env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] _ensure_no_port_binding_failure(port) [ 618.987953] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.987953] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] raise exception.PortBindingFailed(port_id=port['id']) [ 618.988300] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] nova.exception.PortBindingFailed: Binding failed for port b5de7c52-b6f9-458d-ad88-8dc8b5193462, please check neutron logs for more information. [ 618.988300] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] [ 618.988300] env[62923]: INFO nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Terminating instance [ 618.989192] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Acquiring lock "refresh_cache-5a825d36-7563-4792-8f68-8814ec96cfde" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.268306] env[62923]: DEBUG nova.scheduler.client.report [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 619.333314] env[62923]: DEBUG nova.network.neutron [req-bc52a59f-d0ad-4a75-9a9b-a4d748c9a6cd req-d1d3d038-9a2a-45d0-a25a-dbdf7116bc24 service nova] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.426683] env[62923]: DEBUG nova.network.neutron [req-bc52a59f-d0ad-4a75-9a9b-a4d748c9a6cd req-d1d3d038-9a2a-45d0-a25a-dbdf7116bc24 service nova] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.470150] env[62923]: DEBUG nova.network.neutron [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.773383] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.021s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.774065] env[62923]: ERROR nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4b93a39a-68b5-4df8-836c-65d9d217dd6b, please check neutron logs for more information. [ 619.774065] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Traceback (most recent call last): [ 619.774065] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 619.774065] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] self.driver.spawn(context, instance, image_meta, [ 619.774065] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 619.774065] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] self._vmops.spawn(context, instance, image_meta, injected_files, [ 619.774065] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 619.774065] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] vm_ref = self.build_virtual_machine(instance, [ 619.774065] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 619.774065] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] vif_infos = vmwarevif.get_vif_info(self._session, [ 619.774065] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 619.774411] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] for vif in network_info: [ 619.774411] env[62923]: ERROR nova.compute.manager [instance: 
ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 619.774411] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] return self._sync_wrapper(fn, *args, **kwargs) [ 619.774411] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 619.774411] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] self.wait() [ 619.774411] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 619.774411] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] self[:] = self._gt.wait() [ 619.774411] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 619.774411] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] return self._exit_event.wait() [ 619.774411] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 619.774411] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] result = hub.switch() [ 619.774411] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 619.774411] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] return self.greenlet.switch() [ 619.774783] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 619.774783] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] result = function(*args, **kwargs) [ 619.774783] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 619.774783] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] return func(*args, **kwargs) [ 619.774783] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 619.774783] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] raise e [ 619.774783] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 619.774783] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] nwinfo = self.network_api.allocate_for_instance( [ 619.774783] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 619.774783] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] created_port_ids = self._update_ports_for_instance( [ 619.774783] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File 
"/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 619.774783] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] with excutils.save_and_reraise_exception(): [ 619.774783] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 619.775142] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] self.force_reraise() [ 619.775142] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 619.775142] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] raise self.value [ 619.775142] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 619.775142] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] updated_port = self._update_port( [ 619.775142] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 619.775142] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] _ensure_no_port_binding_failure(port) [ 619.775142] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 619.775142] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] raise exception.PortBindingFailed(port_id=port['id']) [ 619.775142] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] nova.exception.PortBindingFailed: Binding failed for port 4b93a39a-68b5-4df8-836c-65d9d217dd6b, please check neutron logs for more information. [ 619.775142] env[62923]: ERROR nova.compute.manager [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] [ 619.775519] env[62923]: DEBUG nova.compute.utils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Binding failed for port 4b93a39a-68b5-4df8-836c-65d9d217dd6b, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 619.775973] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.030s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.779047] env[62923]: DEBUG nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Build of instance ab0f02a1-f883-4ad6-8f8c-5c300fff0f70 was re-scheduled: Binding failed for port 4b93a39a-68b5-4df8-836c-65d9d217dd6b, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 619.779425] env[62923]: DEBUG nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 619.779667] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Acquiring lock "refresh_cache-ab0f02a1-f883-4ad6-8f8c-5c300fff0f70" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.779809] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Acquired lock "refresh_cache-ab0f02a1-f883-4ad6-8f8c-5c300fff0f70" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.779965] env[62923]: DEBUG nova.network.neutron [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 619.929084] env[62923]: DEBUG oslo_concurrency.lockutils [req-bc52a59f-d0ad-4a75-9a9b-a4d748c9a6cd req-d1d3d038-9a2a-45d0-a25a-dbdf7116bc24 service nova] Releasing lock "refresh_cache-5a825d36-7563-4792-8f68-8814ec96cfde" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.929505] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Acquired lock "refresh_cache-5a825d36-7563-4792-8f68-8814ec96cfde" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.929698] env[62923]: DEBUG nova.network.neutron [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 619.972489] env[62923]: INFO nova.compute.manager [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] [instance: 2a8c7285-35dd-4112-b84a-ea384aead074] Took 1.02 seconds to deallocate network for instance. [ 620.316523] env[62923]: DEBUG nova.network.neutron [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Instance cache missing network info. 
[ 620.406537] env[62923]: DEBUG nova.network.neutron [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 620.445691] env[62923]: DEBUG nova.network.neutron [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 620.709629] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83630594-79fd-455e-9e25-70c9bb029ae4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 620.714883] env[62923]: DEBUG nova.network.neutron [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 620.719869] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428b0e7a-5e52-49e1-b09e-44124428033a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 620.757471] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074090a5-3f7c-474f-811c-f6e9bdecd285 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 620.769855] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f8edc8-224c-4961-820e-7dc5d7bc76c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 620.784791] env[62923]: DEBUG nova.compute.provider_tree [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 620.911133] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Releasing lock "refresh_cache-ab0f02a1-f883-4ad6-8f8c-5c300fff0f70" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 620.911646] env[62923]: DEBUG nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 620.911646] env[62923]: DEBUG nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 620.911753] env[62923]: DEBUG nova.network.neutron [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 620.935344] env[62923]: DEBUG nova.network.neutron [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 620.948414] env[62923]: DEBUG nova.compute.manager [req-8b860e57-d130-435b-97b7-e993a316ec06 req-30af5304-8787-496d-ab4a-d05acca16e19 service nova] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Received event network-vif-deleted-b5de7c52-b6f9-458d-ad88-8dc8b5193462 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 621.002670] env[62923]: INFO nova.scheduler.client.report [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Deleted allocations for instance 2a8c7285-35dd-4112-b84a-ea384aead074
[ 621.220678] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Releasing lock "refresh_cache-5a825d36-7563-4792-8f68-8814ec96cfde" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 621.221142] env[62923]: DEBUG nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 621.221337] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 621.221637] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-348120a2-b851-4b04-9849-743709e8debf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 621.230680] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51cff0e-f3c1-4d62-8d12-17b429e297d7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 621.251495] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5a825d36-7563-4792-8f68-8814ec96cfde could not be found.
[ 621.251716] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 621.251889] env[62923]: INFO nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Took 0.03 seconds to destroy the instance on the hypervisor.
[ 621.252134] env[62923]: DEBUG oslo.service.loopingcall [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 621.252366] env[62923]: DEBUG nova.compute.manager [-] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 621.252466] env[62923]: DEBUG nova.network.neutron [-] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 621.268337] env[62923]: DEBUG nova.network.neutron [-] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
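The oslo.service.loopingcall line above shows nova retrying network deallocation inside a looping call. A minimal sketch of that API, using the fixed-interval variant for illustration (nova's actual wrapper and retry policy live in _try_deallocate_network; deallocate_network() below is a hypothetical stand-in):

    # Illustrative loopingcall usage, assuming the fixed-interval variant.
    from oslo_service import loopingcall

    def deallocate_network():
        pass  # the real code calls neutron and may raise on failure

    def _deallocate_network_with_retries():
        try:
            deallocate_network()
        except Exception:
            return  # swallow the error; the loop fires again next interval
        raise loopingcall.LoopingCallDone()  # success: stop looping

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    timer.start(interval=1).wait()  # blocks until LoopingCallDone is raised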
[ 621.287729] env[62923]: DEBUG nova.scheduler.client.report [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 621.438563] env[62923]: DEBUG nova.network.neutron [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 621.514277] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51992d1d-6e80-47c7-83cd-bd89c992229d tempest-ServerAddressesTestJSON-490820550 tempest-ServerAddressesTestJSON-490820550-project-member] Lock "2a8c7285-35dd-4112-b84a-ea384aead074" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.777s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 621.770443] env[62923]: DEBUG nova.network.neutron [-] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 621.791990] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.016s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 621.792636] env[62923]: ERROR nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1221c65a-3610-477c-97f6-0202d2be27cb, please check neutron logs for more information.
[ 621.792636] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Traceback (most recent call last):
[ 621.792636] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 621.792636] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] self.driver.spawn(context, instance, image_meta,
[ 621.792636] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 621.792636] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 621.792636] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 621.792636] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] vm_ref = self.build_virtual_machine(instance,
[ 621.792636] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 621.792636] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] vif_infos = vmwarevif.get_vif_info(self._session,
[ 621.792636] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 621.793018] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] for vif in network_info:
[ 621.793018] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 621.793018] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] return self._sync_wrapper(fn, *args, **kwargs)
[ 621.793018] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 621.793018] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] self.wait()
[ 621.793018] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 621.793018] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] self[:] = self._gt.wait()
[ 621.793018] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 621.793018] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] return self._exit_event.wait()
[ 621.793018] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 621.793018] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] result = hub.switch()
[ 621.793018] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 621.793018] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] return self.greenlet.switch()
[ 621.793378] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 621.793378] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] result = function(*args, **kwargs)
[ 621.793378] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 621.793378] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] return func(*args, **kwargs)
[ 621.793378] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 621.793378] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] raise e
[ 621.793378] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 621.793378] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] nwinfo = self.network_api.allocate_for_instance(
[ 621.793378] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 621.793378] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] created_port_ids = self._update_ports_for_instance(
[ 621.793378] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 621.793378] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] with excutils.save_and_reraise_exception():
[ 621.793378] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 621.793716] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] self.force_reraise()
[ 621.793716] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 621.793716] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] raise self.value
[ 621.793716] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 621.793716] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] updated_port = self._update_port(
[ 621.793716] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 621.793716] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] _ensure_no_port_binding_failure(port)
[ 621.793716] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 621.793716] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] raise exception.PortBindingFailed(port_id=port['id'])
[ 621.793716] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] nova.exception.PortBindingFailed: Binding failed for port 1221c65a-3610-477c-97f6-0202d2be27cb, please check neutron logs for more information.
[ 621.793716] env[62923]: ERROR nova.compute.manager [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48]
[ 621.794082] env[62923]: DEBUG nova.compute.utils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Binding failed for port 1221c65a-3610-477c-97f6-0202d2be27cb, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 621.794844] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.544s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 621.799203] env[62923]: DEBUG nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Build of instance 353b72b2-cd56-442f-9010-c75baf8f5a48 was re-scheduled: Binding failed for port 1221c65a-3610-477c-97f6-0202d2be27cb, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
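The frames through oslo_utils/excutils.py in the traceback above are the standard save_and_reraise_exception idiom: the context manager captures the in-flight exception so cleanup can run, then re-raises it (force_reraise() -> raise self.value) on exit. A minimal sketch of the idiom, with hypothetical stand-ins for the port update and rollback steps:

    # Minimal sketch of the excutils idiom seen in the traceback above;
    # update_port() and rollback() are hypothetical stand-ins.
    from oslo_utils import excutils

    def update_port():
        raise ValueError('binding failed')

    def rollback():
        print('cleaning up before re-raise')

    try:
        update_port()
    except Exception:
        with excutils.save_and_reraise_exception():
            rollback()  # runs first; the original exception re-raises on exit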
[ 621.799203] env[62923]: DEBUG nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 621.799203] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Acquiring lock "refresh_cache-353b72b2-cd56-442f-9010-c75baf8f5a48" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 621.799203] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Acquired lock "refresh_cache-353b72b2-cd56-442f-9010-c75baf8f5a48" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 621.799347] env[62923]: DEBUG nova.network.neutron [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 621.941591] env[62923]: INFO nova.compute.manager [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] [instance: ab0f02a1-f883-4ad6-8f8c-5c300fff0f70] Took 1.03 seconds to deallocate network for instance.
[ 622.017592] env[62923]: DEBUG nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 622.275520] env[62923]: INFO nova.compute.manager [-] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Took 1.02 seconds to deallocate network for instance.
[ 622.277797] env[62923]: DEBUG nova.compute.claims [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 622.277976] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 622.320258] env[62923]: DEBUG nova.network.neutron [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 622.507869] env[62923]: DEBUG nova.network.neutron [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 622.541218] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 622.710903] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5733ef41-b70c-470d-b214-4fce803b0ee5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 622.718803] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5ee33d-85db-4d94-8a37-7b8a13f7e966 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 622.751429] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca9d067-2fc2-479c-9317-b0f576748442 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 622.760913] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e12416c-6ae7-47ee-adb5-061394dd3752 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 622.772007] env[62923]: DEBUG nova.compute.provider_tree [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 622.988596] env[62923]: INFO nova.scheduler.client.report [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Deleted allocations for instance ab0f02a1-f883-4ad6-8f8c-5c300fff0f70
[ 623.012040] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Releasing lock "refresh_cache-353b72b2-cd56-442f-9010-c75baf8f5a48" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 623.012040] env[62923]: DEBUG nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 623.012040] env[62923]: DEBUG nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 623.012040] env[62923]: DEBUG nova.network.neutron [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 623.043102] env[62923]: DEBUG nova.network.neutron [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 623.278288] env[62923]: DEBUG nova.scheduler.client.report [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 623.499373] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09634c64-b594-478c-b1bf-d4adf8c1e550 tempest-ImagesNegativeTestJSON-1252702591 tempest-ImagesNegativeTestJSON-1252702591-project-member] Lock "ab0f02a1-f883-4ad6-8f8c-5c300fff0f70" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.556s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 623.546838] env[62923]: DEBUG nova.network.neutron [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 623.783922] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.989s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 623.784599] env[62923]: ERROR nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f53dec6a-8c2a-461a-9555-1212dc01ba74, please check neutron logs for more information.
[ 623.784599] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Traceback (most recent call last):
[ 623.784599] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 623.784599] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] self.driver.spawn(context, instance, image_meta,
[ 623.784599] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 623.784599] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 623.784599] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 623.784599] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] vm_ref = self.build_virtual_machine(instance,
[ 623.784599] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 623.784599] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] vif_infos = vmwarevif.get_vif_info(self._session,
[ 623.784599] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 623.784915] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] for vif in network_info:
[ 623.784915] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 623.784915] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] return self._sync_wrapper(fn, *args, **kwargs)
[ 623.784915] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 623.784915] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] self.wait()
[ 623.784915] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 623.784915] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] self[:] = self._gt.wait()
[ 623.784915] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 623.784915] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] return self._exit_event.wait()
[ 623.784915] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 623.784915] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] result = hub.switch()
[ 623.784915] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 623.784915] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] return self.greenlet.switch()
[ 623.785266] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 623.785266] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] result = function(*args, **kwargs)
[ 623.785266] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 623.785266] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] return func(*args, **kwargs)
[ 623.785266] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 623.785266] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] raise e
[ 623.785266] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 623.785266] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] nwinfo = self.network_api.allocate_for_instance(
[ 623.785266] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 623.785266] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] created_port_ids = self._update_ports_for_instance(
[ 623.785266] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 623.785266] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] with excutils.save_and_reraise_exception():
[ 623.785266] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 623.785620] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] self.force_reraise()
[ 623.785620] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 623.785620] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] raise self.value
[ 623.785620] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 623.785620] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] updated_port = self._update_port(
[ 623.785620] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 623.785620] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] _ensure_no_port_binding_failure(port)
[ 623.785620] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 623.785620] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] raise exception.PortBindingFailed(port_id=port['id'])
[ 623.785620] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516] nova.exception.PortBindingFailed: Binding failed for port f53dec6a-8c2a-461a-9555-1212dc01ba74, please check neutron logs for more information.
[ 623.785620] env[62923]: ERROR nova.compute.manager [instance: 03ee0097-1200-43ce-9baa-e9da80105516]
[ 623.785920] env[62923]: DEBUG nova.compute.utils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Binding failed for port f53dec6a-8c2a-461a-9555-1212dc01ba74, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 623.786557] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.562s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 623.789326] env[62923]: DEBUG nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Build of instance 03ee0097-1200-43ce-9baa-e9da80105516 was re-scheduled: Binding failed for port f53dec6a-8c2a-461a-9555-1212dc01ba74, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
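The middle of each traceback (model.py _sync_wrapper -> greenthread wait -> hub.switch) reflects nova allocating networks asynchronously in an eventlet green thread: the spawn happens early in the build, and the failure only surfaces when the driver first iterates network_info and wait() re-raises it. A minimal standalone sketch of that behavior (RuntimeError stands in for PortBindingFailed):

    # Illustrative sketch: an exception raised inside an eventlet green
    # thread resurfaces at the wait() call site, as in the tracebacks above.
    import eventlet

    def allocate_network():
        raise RuntimeError('binding failed')

    gt = eventlet.spawn(allocate_network)
    try:
        gt.wait()  # re-raises the RuntimeError from the green thread
    except RuntimeError as exc:
        print('surfaced at wait():', exc)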
[ 623.789728] env[62923]: DEBUG nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 623.789940] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Acquiring lock "refresh_cache-03ee0097-1200-43ce-9baa-e9da80105516" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 623.790094] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Acquired lock "refresh_cache-03ee0097-1200-43ce-9baa-e9da80105516" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 623.790251] env[62923]: DEBUG nova.network.neutron [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 624.004424] env[62923]: DEBUG nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 624.049124] env[62923]: INFO nova.compute.manager [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] [instance: 353b72b2-cd56-442f-9010-c75baf8f5a48] Took 1.04 seconds to deallocate network for instance.
[ 624.323370] env[62923]: DEBUG nova.network.neutron [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 624.531092] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 624.532950] env[62923]: DEBUG nova.network.neutron [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 624.699054] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94707f44-cde9-4cdb-b25f-180b88572b31 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.710174] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7994fc01-f107-4f34-9ccf-7b36d8b5a922 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.739246] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d106de-6eb3-4923-8664-d3cadb53c8df {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.746490] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5a18a3-23bf-43e6-a330-4576c77b2e0b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 624.759340] env[62923]: DEBUG nova.compute.provider_tree [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 625.035256] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Releasing lock "refresh_cache-03ee0097-1200-43ce-9baa-e9da80105516" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 625.035558] env[62923]: DEBUG nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 625.035709] env[62923]: DEBUG nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 625.035873] env[62923]: DEBUG nova.network.neutron [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 625.057250] env[62923]: DEBUG nova.network.neutron [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 625.083458] env[62923]: INFO nova.scheduler.client.report [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Deleted allocations for instance 353b72b2-cd56-442f-9010-c75baf8f5a48
[ 625.262245] env[62923]: DEBUG nova.scheduler.client.report [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 625.564405] env[62923]: DEBUG nova.network.neutron [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 625.590789] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fc44d69f-7b43-453c-b00e-9a4efc2f0409 tempest-ServerActionsTestOtherB-793892656 tempest-ServerActionsTestOtherB-793892656-project-member] Lock "353b72b2-cd56-442f-9010-c75baf8f5a48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.007s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 625.767164] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.980s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
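A quick worked reading of the recurring inventory records above, assuming the conventional placement formula where usable capacity per resource class is (total - reserved) * allocation_ratio (that formula is an assumption here, not stated in the log):

    # Worked example for the inventory data logged for provider a513b783-...
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

Note that max_unit (16 for VCPU) caps what a single allocation may request, independent of total capacity.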
[ 625.767860] env[62923]: ERROR nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 648493d0-da21-4752-941b-c298fb95d7e4, please check neutron logs for more information.
[ 625.767860] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Traceback (most recent call last):
[ 625.767860] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 625.767860] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] self.driver.spawn(context, instance, image_meta,
[ 625.767860] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 625.767860] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 625.767860] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 625.767860] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] vm_ref = self.build_virtual_machine(instance,
[ 625.767860] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 625.767860] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] vif_infos = vmwarevif.get_vif_info(self._session,
[ 625.767860] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 625.768202] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] for vif in network_info:
[ 625.768202] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 625.768202] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] return self._sync_wrapper(fn, *args, **kwargs)
[ 625.768202] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 625.768202] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] self.wait()
[ 625.768202] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 625.768202] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] self[:] = self._gt.wait()
[ 625.768202] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 625.768202] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] return self._exit_event.wait()
[ 625.768202] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 625.768202] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] result = hub.switch()
[ 625.768202] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 625.768202] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] return self.greenlet.switch()
[ 625.768483] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 625.768483] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] result = function(*args, **kwargs)
[ 625.768483] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 625.768483] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] return func(*args, **kwargs)
[ 625.768483] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 625.768483] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] raise e
[ 625.768483] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 625.768483] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] nwinfo = self.network_api.allocate_for_instance(
[ 625.768483] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 625.768483] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] created_port_ids = self._update_ports_for_instance(
[ 625.768483] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 625.768483] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] with excutils.save_and_reraise_exception():
[ 625.768483] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 625.768764] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] self.force_reraise()
[ 625.768764] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 625.768764] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] raise self.value
[ 625.768764] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 625.768764] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] updated_port = self._update_port(
[ 625.768764] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 625.768764] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] _ensure_no_port_binding_failure(port)
[ 625.768764] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 625.768764] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] raise exception.PortBindingFailed(port_id=port['id'])
[ 625.768764] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] nova.exception.PortBindingFailed: Binding failed for port 648493d0-da21-4752-941b-c298fb95d7e4, please check neutron logs for more information.
[ 625.768764] env[62923]: ERROR nova.compute.manager [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9]
[ 625.769012] env[62923]: DEBUG nova.compute.utils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Binding failed for port 648493d0-da21-4752-941b-c298fb95d7e4, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 625.769758] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.468s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 625.771389] env[62923]: INFO nova.compute.claims [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 625.774015] env[62923]: DEBUG nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Build of instance 0dba167b-aa56-4463-9749-b74fbc7430d9 was re-scheduled: Binding failed for port 648493d0-da21-4752-941b-c298fb95d7e4, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
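Every failure in this run ends with "please check neutron logs for more information". One way to follow that advice from the API side is to query the port's binding with openstacksdk; a hedged sketch (the cloud name is a hypothetical clouds.yaml entry, and the port UUID is the one from the failure above):

    # Hedged debugging helper: inspect a port's binding from the API side.
    import openstack

    conn = openstack.connect(cloud='devstack')  # hypothetical cloud entry
    port = conn.network.get_port('648493d0-da21-4752-941b-c298fb95d7e4')
    # A port that failed to bind typically reports vif_type 'binding_failed'.
    print(port.binding_vif_type, port.binding_host_id)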
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 625.774618] env[62923]: DEBUG nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 625.775068] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "refresh_cache-0dba167b-aa56-4463-9749-b74fbc7430d9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.775068] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "refresh_cache-0dba167b-aa56-4463-9749-b74fbc7430d9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.775201] env[62923]: DEBUG nova.network.neutron [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 626.067041] env[62923]: INFO nova.compute.manager [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] [instance: 03ee0097-1200-43ce-9baa-e9da80105516] Took 1.03 seconds to deallocate network for instance. [ 626.092767] env[62923]: DEBUG nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 626.298207] env[62923]: DEBUG nova.network.neutron [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.454532] env[62923]: DEBUG nova.network.neutron [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.626410] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.956561] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "refresh_cache-0dba167b-aa56-4463-9749-b74fbc7430d9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.956791] env[62923]: DEBUG nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 626.956969] env[62923]: DEBUG nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 626.959028] env[62923]: DEBUG nova.network.neutron [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 626.989192] env[62923]: DEBUG nova.network.neutron [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.106138] env[62923]: INFO nova.scheduler.client.report [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Deleted allocations for instance 03ee0097-1200-43ce-9baa-e9da80105516 [ 627.211021] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c7c85f-057a-4589-814d-e5423767f2ec {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.217721] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020c3105-2583-4991-a689-e2a075b0e008 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.249489] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ce7084-6873-46f3-b6ab-d7023b7337ea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.257445] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68c4797-f5bf-4d6c-a186-7990094928df {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.271501] env[62923]: DEBUG nova.compute.provider_tree [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.494450] env[62923]: DEBUG nova.network.neutron [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.616082] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6546f21d-db73-488e-a76c-fb68595deebe tempest-FloatingIPsAssociationNegativeTestJSON-1981902413 tempest-FloatingIPsAssociationNegativeTestJSON-1981902413-project-member] Lock "03ee0097-1200-43ce-9baa-e9da80105516" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.535s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.777149] env[62923]: DEBUG nova.scheduler.client.report [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 627.998253] env[62923]: INFO 
nova.compute.manager [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 0dba167b-aa56-4463-9749-b74fbc7430d9] Took 1.04 seconds to deallocate network for instance. [ 628.119241] env[62923]: DEBUG nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 628.283896] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.513s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.283896] env[62923]: DEBUG nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 628.287312] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.579s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.288309] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.288612] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62923) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 628.289096] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.945s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.295920] env[62923]: INFO nova.compute.claims [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.300666] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dea3f35-8ecb-4164-8b20-0a89f6838d8f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.310473] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-2ca2ee2f-a0d4-4be6-a372-f8af6b4da299 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.330315] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e5f662-e8df-449f-8273-1c526b7b109c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.337265] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23907661-3b71-420b-a1f2-73b6b2d067f4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.369872] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181493MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62923) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 628.370067] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.646924] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.789963] env[62923]: DEBUG nova.compute.utils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 628.794574] env[62923]: DEBUG nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 628.794972] env[62923]: DEBUG nova.network.neutron [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 628.857887] env[62923]: DEBUG nova.policy [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7a44765166242d2b39871e8c2fb13a3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ab01f1a9bae54a7cbd3237d68e8ea5fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 629.052245] env[62923]: INFO nova.scheduler.client.report [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleted allocations for instance 0dba167b-aa56-4463-9749-b74fbc7430d9 [ 629.300251] env[62923]: DEBUG nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 629.327711] env[62923]: DEBUG nova.network.neutron [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Successfully created port: c4824b52-af57-4910-85a8-7621d454e216 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 629.566685] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c35edd6f-fd0b-4322-95c2-1fe4ff430f58 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "0dba167b-aa56-4463-9749-b74fbc7430d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.961s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.764662] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66cacbb8-2806-4b4b-a726-1417cc5b7bf2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.773259] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8341d4a-b811-42ad-98c3-c99edf32e55d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.805798] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b951036-39a3-4a83-9feb-fcef1094d276 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.821862] env[62923]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2098ec5-762b-4287-a3a8-2d86e40f0622 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.836506] env[62923]: DEBUG nova.compute.provider_tree [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.069565] env[62923]: DEBUG nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 630.178266] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Acquiring lock "bf0222ef-b86f-4d85-ab75-96661b90a4b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.178631] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Lock "bf0222ef-b86f-4d85-ab75-96661b90a4b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.322845] env[62923]: DEBUG nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 630.342305] env[62923]: DEBUG nova.scheduler.client.report [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 630.358701] env[62923]: DEBUG nova.virt.hardware [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 630.359618] env[62923]: DEBUG nova.virt.hardware [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 630.359618] env[62923]: DEBUG nova.virt.hardware [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 630.359618] env[62923]: DEBUG nova.virt.hardware [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 630.359618] env[62923]: DEBUG nova.virt.hardware [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 630.359618] env[62923]: DEBUG nova.virt.hardware [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 630.360387] env[62923]: DEBUG nova.virt.hardware [None 
req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 630.360387] env[62923]: DEBUG nova.virt.hardware [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 630.360501] env[62923]: DEBUG nova.virt.hardware [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 630.360600] env[62923]: DEBUG nova.virt.hardware [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 630.360959] env[62923]: DEBUG nova.virt.hardware [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 630.362068] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d004514f-0df5-49b4-9ebd-dc89b9ce476a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.371758] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b81d8a-05d6-4fbe-b3fe-6ac303ade5d7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.598722] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.854020] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.854020] env[62923]: DEBUG nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 630.855643] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.366104] env[62923]: DEBUG nova.compute.utils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 631.370789] env[62923]: DEBUG nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 631.371135] env[62923]: DEBUG nova.network.neutron [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 631.375122] env[62923]: DEBUG nova.compute.manager [req-7ec4c115-8f2a-402d-853b-809a31d0d6dc req-d73d4339-bb41-4182-89fd-d93ef4b386da service nova] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Received event network-changed-c4824b52-af57-4910-85a8-7621d454e216 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 631.375456] env[62923]: DEBUG nova.compute.manager [req-7ec4c115-8f2a-402d-853b-809a31d0d6dc req-d73d4339-bb41-4182-89fd-d93ef4b386da service nova] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Refreshing instance network info cache due to event network-changed-c4824b52-af57-4910-85a8-7621d454e216. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 631.375773] env[62923]: DEBUG oslo_concurrency.lockutils [req-7ec4c115-8f2a-402d-853b-809a31d0d6dc req-d73d4339-bb41-4182-89fd-d93ef4b386da service nova] Acquiring lock "refresh_cache-2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.376422] env[62923]: DEBUG oslo_concurrency.lockutils [req-7ec4c115-8f2a-402d-853b-809a31d0d6dc req-d73d4339-bb41-4182-89fd-d93ef4b386da service nova] Acquired lock "refresh_cache-2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.376912] env[62923]: DEBUG nova.network.neutron [req-7ec4c115-8f2a-402d-853b-809a31d0d6dc req-d73d4339-bb41-4182-89fd-d93ef4b386da service nova] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Refreshing network info cache for port c4824b52-af57-4910-85a8-7621d454e216 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 631.514170] env[62923]: DEBUG nova.policy [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '688302aa32204b96b226e1b639ee49e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d9bbc5824294347b266ce8a38db452f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 631.556366] env[62923]: ERROR nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c4824b52-af57-4910-85a8-7621d454e216, please check neutron logs for more information. 
[ 631.556366] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 631.556366] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 631.556366] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 631.556366] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 631.556366] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 631.556366] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 631.556366] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 631.556366] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 631.556366] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 631.556366] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 631.556366] env[62923]: ERROR nova.compute.manager raise self.value [ 631.556366] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 631.556366] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 631.556366] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 631.556366] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 631.557070] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 631.557070] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 631.557070] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c4824b52-af57-4910-85a8-7621d454e216, please check neutron logs for more information. 
[ 631.557070] env[62923]: ERROR nova.compute.manager [ 631.557070] env[62923]: Traceback (most recent call last): [ 631.557070] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 631.557070] env[62923]: listener.cb(fileno) [ 631.557070] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 631.557070] env[62923]: result = function(*args, **kwargs) [ 631.557070] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 631.557070] env[62923]: return func(*args, **kwargs) [ 631.557070] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 631.557070] env[62923]: raise e [ 631.557070] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 631.557070] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 631.557070] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 631.557070] env[62923]: created_port_ids = self._update_ports_for_instance( [ 631.557070] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 631.557070] env[62923]: with excutils.save_and_reraise_exception(): [ 631.557070] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 631.557070] env[62923]: self.force_reraise() [ 631.557070] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 631.557070] env[62923]: raise self.value [ 631.557070] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 631.557070] env[62923]: updated_port = self._update_port( [ 631.557070] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 631.557070] env[62923]: _ensure_no_port_binding_failure(port) [ 631.557070] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 631.557070] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 631.558476] env[62923]: nova.exception.PortBindingFailed: Binding failed for port c4824b52-af57-4910-85a8-7621d454e216, please check neutron logs for more information. [ 631.558476] env[62923]: Removing descriptor: 21 [ 631.558476] env[62923]: ERROR nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c4824b52-af57-4910-85a8-7621d454e216, please check neutron logs for more information. 
[ 631.558476] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Traceback (most recent call last): [ 631.558476] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 631.558476] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] yield resources [ 631.558476] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 631.558476] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] self.driver.spawn(context, instance, image_meta, [ 631.558476] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 631.558476] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 631.558476] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 631.558476] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] vm_ref = self.build_virtual_machine(instance, [ 631.559119] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 631.559119] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] vif_infos = vmwarevif.get_vif_info(self._session, [ 631.559119] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 631.559119] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] for vif in network_info: [ 631.559119] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 631.559119] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] return self._sync_wrapper(fn, *args, **kwargs) [ 631.559119] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 631.559119] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] self.wait() [ 631.559119] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 631.559119] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] self[:] = self._gt.wait() [ 631.559119] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 631.559119] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] return self._exit_event.wait() [ 631.559119] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 631.559633] env[62923]: ERROR 
nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] result = hub.switch() [ 631.559633] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 631.559633] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] return self.greenlet.switch() [ 631.559633] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 631.559633] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] result = function(*args, **kwargs) [ 631.559633] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 631.559633] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] return func(*args, **kwargs) [ 631.559633] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 631.559633] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] raise e [ 631.559633] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 631.559633] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] nwinfo = self.network_api.allocate_for_instance( [ 631.559633] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 631.559633] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] created_port_ids = self._update_ports_for_instance( [ 631.560332] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 631.560332] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] with excutils.save_and_reraise_exception(): [ 631.560332] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 631.560332] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] self.force_reraise() [ 631.560332] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 631.560332] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] raise self.value [ 631.560332] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 631.560332] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] updated_port = self._update_port( [ 631.560332] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 631.560332] 
env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] _ensure_no_port_binding_failure(port) [ 631.560332] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 631.560332] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] raise exception.PortBindingFailed(port_id=port['id']) [ 631.560794] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] nova.exception.PortBindingFailed: Binding failed for port c4824b52-af57-4910-85a8-7621d454e216, please check neutron logs for more information. [ 631.560794] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] [ 631.560794] env[62923]: INFO nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Terminating instance [ 631.560794] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Acquiring lock "refresh_cache-2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.626729] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "ac14f710-41c0-429c-92a3-46acceace3fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.626959] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "ac14f710-41c0-429c-92a3-46acceace3fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.834104] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d83ab26-9e9f-4032-ab38-5feee4555b47 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.844801] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7339944-ab32-49d3-983c-aba0f969f150 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.880642] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34564c32-8efe-4ef6-8068-3f13b18d70b8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.883636] env[62923]: DEBUG nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 631.896409] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c8f22c-8e53-42c9-84b4-dd1693f84b26 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.911585] env[62923]: DEBUG nova.compute.provider_tree [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.925163] env[62923]: DEBUG nova.network.neutron [req-7ec4c115-8f2a-402d-853b-809a31d0d6dc req-d73d4339-bb41-4182-89fd-d93ef4b386da service nova] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 632.090494] env[62923]: DEBUG nova.network.neutron [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Successfully created port: eee5f468-9531-469a-8dbe-eed2faf79c66 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 632.140922] env[62923]: DEBUG nova.network.neutron [req-7ec4c115-8f2a-402d-853b-809a31d0d6dc req-d73d4339-bb41-4182-89fd-d93ef4b386da service nova] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.415537] env[62923]: DEBUG nova.scheduler.client.report [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 632.643694] env[62923]: DEBUG oslo_concurrency.lockutils [req-7ec4c115-8f2a-402d-853b-809a31d0d6dc req-d73d4339-bb41-4182-89fd-d93ef4b386da service nova] Releasing lock "refresh_cache-2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.644159] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Acquired lock "refresh_cache-2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.644351] env[62923]: DEBUG nova.network.neutron [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Building network info cache for instance 
{{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.898556] env[62923]: DEBUG nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 632.925023] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.067s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.925023] env[62923]: ERROR nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e82cc592-0f33-4197-9751-24e4dfc31e08, please check neutron logs for more information. [ 632.925023] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Traceback (most recent call last): [ 632.925023] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 632.925023] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] self.driver.spawn(context, instance, image_meta, [ 632.925023] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 632.925023] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 632.925023] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 632.925023] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] vm_ref = self.build_virtual_machine(instance, [ 632.925395] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 632.925395] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] vif_infos = vmwarevif.get_vif_info(self._session, [ 632.925395] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 632.925395] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] for vif in network_info: [ 632.925395] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 632.925395] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] return self._sync_wrapper(fn, *args, **kwargs) [ 632.925395] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File 
"/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 632.925395] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] self.wait() [ 632.925395] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 632.925395] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] self[:] = self._gt.wait() [ 632.925395] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 632.925395] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] return self._exit_event.wait() [ 632.925395] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 632.925793] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] result = hub.switch() [ 632.925793] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 632.925793] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] return self.greenlet.switch() [ 632.925793] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 632.925793] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] result = function(*args, **kwargs) [ 632.925793] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 632.925793] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] return func(*args, **kwargs) [ 632.925793] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 632.925793] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] raise e [ 632.925793] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 632.925793] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] nwinfo = self.network_api.allocate_for_instance( [ 632.925793] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 632.925793] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] created_port_ids = self._update_ports_for_instance( [ 632.926744] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 632.926744] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] with excutils.save_and_reraise_exception(): [ 632.926744] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.926744] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] self.force_reraise() [ 632.926744] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.926744] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] raise self.value [ 632.926744] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 632.926744] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] updated_port = self._update_port( [ 632.926744] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.926744] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] _ensure_no_port_binding_failure(port) [ 632.926744] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.926744] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] raise exception.PortBindingFailed(port_id=port['id']) [ 632.927045] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] nova.exception.PortBindingFailed: Binding failed for port e82cc592-0f33-4197-9751-24e4dfc31e08, please check neutron logs for more information. [ 632.927045] env[62923]: ERROR nova.compute.manager [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] [ 632.927045] env[62923]: DEBUG nova.compute.utils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Binding failed for port e82cc592-0f33-4197-9751-24e4dfc31e08, please check neutron logs for more information. 
{{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 632.927045] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.708s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.935381] env[62923]: DEBUG nova.virt.hardware [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 632.935603] env[62923]: DEBUG nova.virt.hardware [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 632.935760] env[62923]: DEBUG nova.virt.hardware [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 632.935941] env[62923]: DEBUG nova.virt.hardware [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 632.936160] env[62923]: DEBUG nova.virt.hardware [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 632.936254] env[62923]: DEBUG nova.virt.hardware [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 632.936451] env[62923]: DEBUG nova.virt.hardware [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 632.936607] env[62923]: DEBUG 
nova.virt.hardware [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 632.936769] env[62923]: DEBUG nova.virt.hardware [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 632.936927] env[62923]: DEBUG nova.virt.hardware [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 632.937145] env[62923]: DEBUG nova.virt.hardware [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 632.937665] env[62923]: DEBUG nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Build of instance 386ffe8a-a160-4dea-88e6-529219eaf99f was re-scheduled: Binding failed for port e82cc592-0f33-4197-9751-24e4dfc31e08, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 632.938125] env[62923]: DEBUG nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 632.938382] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Acquiring lock "refresh_cache-386ffe8a-a160-4dea-88e6-529219eaf99f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.938535] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Acquired lock "refresh_cache-386ffe8a-a160-4dea-88e6-529219eaf99f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.938692] env[62923]: DEBUG nova.network.neutron [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.942762] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273901d3-8882-40f9-a888-e13ddb5fc1f8 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.950593] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80763121-ab80-4798-bd0f-ce3df1e3ba24 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.046686] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Acquiring lock "78daba16-0c0f-4db6-bde1-70d960a6e7ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.046914] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Lock "78daba16-0c0f-4db6-bde1-70d960a6e7ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.172163] env[62923]: DEBUG nova.network.neutron [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.318361] env[62923]: DEBUG nova.network.neutron [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.403828] env[62923]: DEBUG nova.compute.manager [req-1d3c0711-ac78-417d-9d08-cd9e4c84e6e9 req-0b53d2a5-4cfc-48c7-9634-92434287317f service nova] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Received event network-vif-deleted-c4824b52-af57-4910-85a8-7621d454e216 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 633.404203] env[62923]: DEBUG nova.compute.manager [req-1d3c0711-ac78-417d-9d08-cd9e4c84e6e9 req-0b53d2a5-4cfc-48c7-9634-92434287317f service nova] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Received event network-changed-eee5f468-9531-469a-8dbe-eed2faf79c66 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 633.405138] env[62923]: DEBUG nova.compute.manager [req-1d3c0711-ac78-417d-9d08-cd9e4c84e6e9 req-0b53d2a5-4cfc-48c7-9634-92434287317f service nova] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Refreshing instance network info cache due to event network-changed-eee5f468-9531-469a-8dbe-eed2faf79c66. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 633.405138] env[62923]: DEBUG oslo_concurrency.lockutils [req-1d3c0711-ac78-417d-9d08-cd9e4c84e6e9 req-0b53d2a5-4cfc-48c7-9634-92434287317f service nova] Acquiring lock "refresh_cache-f106b311-fc2f-4811-b7e0-d680de236b78" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.405519] env[62923]: DEBUG oslo_concurrency.lockutils [req-1d3c0711-ac78-417d-9d08-cd9e4c84e6e9 req-0b53d2a5-4cfc-48c7-9634-92434287317f service nova] Acquired lock "refresh_cache-f106b311-fc2f-4811-b7e0-d680de236b78" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.405933] env[62923]: DEBUG nova.network.neutron [req-1d3c0711-ac78-417d-9d08-cd9e4c84e6e9 req-0b53d2a5-4cfc-48c7-9634-92434287317f service nova] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Refreshing network info cache for port eee5f468-9531-469a-8dbe-eed2faf79c66 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 633.471255] env[62923]: DEBUG nova.network.neutron [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.562894] env[62923]: ERROR nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port eee5f468-9531-469a-8dbe-eed2faf79c66, please check neutron logs for more information. 
[ 633.562894] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 633.562894] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.562894] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 633.562894] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 633.562894] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 633.562894] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 633.562894] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 633.562894] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.562894] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 633.562894] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.562894] env[62923]: ERROR nova.compute.manager raise self.value [ 633.562894] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 633.562894] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 633.562894] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.562894] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 633.563335] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.563335] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 633.563335] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port eee5f468-9531-469a-8dbe-eed2faf79c66, please check neutron logs for more information. 
[ 633.563335] env[62923]: ERROR nova.compute.manager [ 633.563335] env[62923]: Traceback (most recent call last): [ 633.563335] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 633.563335] env[62923]: listener.cb(fileno) [ 633.563335] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.563335] env[62923]: result = function(*args, **kwargs) [ 633.563335] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 633.563335] env[62923]: return func(*args, **kwargs) [ 633.563335] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 633.563335] env[62923]: raise e [ 633.563335] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.563335] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 633.563335] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 633.563335] env[62923]: created_port_ids = self._update_ports_for_instance( [ 633.563335] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 633.563335] env[62923]: with excutils.save_and_reraise_exception(): [ 633.563335] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.563335] env[62923]: self.force_reraise() [ 633.563335] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.563335] env[62923]: raise self.value [ 633.563335] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 633.563335] env[62923]: updated_port = self._update_port( [ 633.563335] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.563335] env[62923]: _ensure_no_port_binding_failure(port) [ 633.563335] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.563335] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 633.564162] env[62923]: nova.exception.PortBindingFailed: Binding failed for port eee5f468-9531-469a-8dbe-eed2faf79c66, please check neutron logs for more information. [ 633.564162] env[62923]: Removing descriptor: 17 [ 633.564162] env[62923]: ERROR nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port eee5f468-9531-469a-8dbe-eed2faf79c66, please check neutron logs for more information. 
[ 633.564162] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Traceback (most recent call last): [ 633.564162] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 633.564162] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] yield resources [ 633.564162] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 633.564162] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] self.driver.spawn(context, instance, image_meta, [ 633.564162] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 633.564162] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] self._vmops.spawn(context, instance, image_meta, injected_files, [ 633.564162] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 633.564162] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] vm_ref = self.build_virtual_machine(instance, [ 633.565392] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 633.565392] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] vif_infos = vmwarevif.get_vif_info(self._session, [ 633.565392] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 633.565392] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] for vif in network_info: [ 633.565392] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 633.565392] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] return self._sync_wrapper(fn, *args, **kwargs) [ 633.565392] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 633.565392] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] self.wait() [ 633.565392] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 633.565392] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] self[:] = self._gt.wait() [ 633.565392] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 633.565392] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] return self._exit_event.wait() [ 633.565392] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 633.565716] env[62923]: ERROR 
nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] result = hub.switch() [ 633.565716] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 633.565716] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] return self.greenlet.switch() [ 633.565716] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.565716] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] result = function(*args, **kwargs) [ 633.565716] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 633.565716] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] return func(*args, **kwargs) [ 633.565716] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 633.565716] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] raise e [ 633.565716] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.565716] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] nwinfo = self.network_api.allocate_for_instance( [ 633.565716] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 633.565716] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] created_port_ids = self._update_ports_for_instance( [ 633.566052] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 633.566052] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] with excutils.save_and_reraise_exception(): [ 633.566052] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.566052] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] self.force_reraise() [ 633.566052] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.566052] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] raise self.value [ 633.566052] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 633.566052] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] updated_port = self._update_port( [ 633.566052] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.566052] 
env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] _ensure_no_port_binding_failure(port) [ 633.566052] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 633.566052] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] raise exception.PortBindingFailed(port_id=port['id']) [ 633.566450] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] nova.exception.PortBindingFailed: Binding failed for port eee5f468-9531-469a-8dbe-eed2faf79c66, please check neutron logs for more information. [ 633.566450] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] [ 633.566450] env[62923]: INFO nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Terminating instance [ 633.571008] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Acquiring lock "refresh_cache-f106b311-fc2f-4811-b7e0-d680de236b78" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.628265] env[62923]: DEBUG nova.network.neutron [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.822068] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Releasing lock "refresh_cache-2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.822725] env[62923]: DEBUG nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 633.822980] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 633.823665] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-664a784e-70ee-4494-9ecf-2035f2cf7626 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.837149] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d21ded-809a-422c-b946-5b79bf3d21de {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.866676] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab could not be found. [ 633.867088] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 633.867181] env[62923]: INFO nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Took 0.04 seconds to destroy the instance on the hypervisor. [ 633.867432] env[62923]: DEBUG oslo.service.loopingcall [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 633.867656] env[62923]: DEBUG nova.compute.manager [-] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 633.867748] env[62923]: DEBUG nova.network.neutron [-] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.885862] env[62923]: DEBUG nova.network.neutron [-] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.927141] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4d1bad-3110-47cb-a465-6eed2b8071d4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.931275] env[62923]: DEBUG nova.network.neutron [req-1d3c0711-ac78-417d-9d08-cd9e4c84e6e9 req-0b53d2a5-4cfc-48c7-9634-92434287317f service nova] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.938449] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e275ed2b-33d3-46cd-b8c7-9c093d6d414b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.969724] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d7ba2a-3d63-4a4a-97e0-5be006d32ab4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.978597] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d8c4c5-9d39-4b10-a750-724bae3066f0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.998101] env[62923]: DEBUG nova.compute.provider_tree [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.007151] env[62923]: DEBUG nova.network.neutron [req-1d3c0711-ac78-417d-9d08-cd9e4c84e6e9 req-0b53d2a5-4cfc-48c7-9634-92434287317f service nova] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.131429] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Releasing lock "refresh_cache-386ffe8a-a160-4dea-88e6-529219eaf99f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.131676] env[62923]: DEBUG nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 634.131844] env[62923]: DEBUG nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 634.132012] env[62923]: DEBUG nova.network.neutron [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 634.154225] env[62923]: DEBUG nova.network.neutron [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 634.390129] env[62923]: DEBUG nova.network.neutron [-] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.504297] env[62923]: DEBUG nova.scheduler.client.report [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 634.511026] env[62923]: DEBUG oslo_concurrency.lockutils [req-1d3c0711-ac78-417d-9d08-cd9e4c84e6e9 req-0b53d2a5-4cfc-48c7-9634-92434287317f service nova] Releasing lock "refresh_cache-f106b311-fc2f-4811-b7e0-d680de236b78" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.511026] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Acquired lock "refresh_cache-f106b311-fc2f-4811-b7e0-d680de236b78" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.511026] env[62923]: DEBUG nova.network.neutron [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 634.657694] env[62923]: DEBUG nova.network.neutron [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 
386ffe8a-a160-4dea-88e6-529219eaf99f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.893261] env[62923]: INFO nova.compute.manager [-] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Took 1.03 seconds to deallocate network for instance. [ 634.897339] env[62923]: DEBUG nova.compute.claims [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 634.897505] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.011737] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.086s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.012400] env[62923]: ERROR nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 573f86dc-c22b-42d3-8a61-5022ae6fc36c, please check neutron logs for more information. 
[ 635.012400] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Traceback (most recent call last): [ 635.012400] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 635.012400] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] self.driver.spawn(context, instance, image_meta, [ 635.012400] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 635.012400] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 635.012400] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 635.012400] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] vm_ref = self.build_virtual_machine(instance, [ 635.012400] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 635.012400] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] vif_infos = vmwarevif.get_vif_info(self._session, [ 635.012400] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 635.012792] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] for vif in network_info: [ 635.012792] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 635.012792] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] return self._sync_wrapper(fn, *args, **kwargs) [ 635.012792] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 635.012792] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] self.wait() [ 635.012792] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 635.012792] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] self[:] = self._gt.wait() [ 635.012792] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 635.012792] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] return self._exit_event.wait() [ 635.012792] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 635.012792] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] result = hub.switch() [ 635.012792] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
635.012792] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] return self.greenlet.switch() [ 635.013130] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.013130] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] result = function(*args, **kwargs) [ 635.013130] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 635.013130] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] return func(*args, **kwargs) [ 635.013130] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 635.013130] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] raise e [ 635.013130] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.013130] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] nwinfo = self.network_api.allocate_for_instance( [ 635.013130] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 635.013130] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] created_port_ids = self._update_ports_for_instance( [ 635.013130] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 635.013130] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] with excutils.save_and_reraise_exception(): [ 635.013130] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.013524] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] self.force_reraise() [ 635.013524] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.013524] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] raise self.value [ 635.013524] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 635.013524] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] updated_port = self._update_port( [ 635.013524] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.013524] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] _ensure_no_port_binding_failure(port) [ 635.013524] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 635.013524] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] raise exception.PortBindingFailed(port_id=port['id']) [ 635.013524] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] nova.exception.PortBindingFailed: Binding failed for port 573f86dc-c22b-42d3-8a61-5022ae6fc36c, please check neutron logs for more information. [ 635.013524] env[62923]: ERROR nova.compute.manager [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] [ 635.013808] env[62923]: DEBUG nova.compute.utils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Binding failed for port 573f86dc-c22b-42d3-8a61-5022ae6fc36c, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 635.014919] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.737s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.021039] env[62923]: DEBUG nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Build of instance a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1 was re-scheduled: Binding failed for port 573f86dc-c22b-42d3-8a61-5022ae6fc36c, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 635.021137] env[62923]: DEBUG nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 635.021437] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquiring lock "refresh_cache-a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.021586] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Acquired lock "refresh_cache-a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.021736] env[62923]: DEBUG nova.network.neutron [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 635.044476] env[62923]: DEBUG nova.network.neutron [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.161554] env[62923]: INFO nova.compute.manager [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] [instance: 386ffe8a-a160-4dea-88e6-529219eaf99f] Took 1.03 seconds to deallocate network for instance. 
[ 635.176229] env[62923]: DEBUG nova.network.neutron [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.425076] env[62923]: DEBUG nova.compute.manager [req-630a4a17-ab28-44b2-b1d6-8639b9875fc4 req-902ef130-6780-480e-b60a-c5c14093fb6c service nova] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Received event network-vif-deleted-eee5f468-9531-469a-8dbe-eed2faf79c66 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 635.504210] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Acquiring lock "7831bfb8-b336-4338-923f-c759a5c67c06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.504365] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Lock "7831bfb8-b336-4338-923f-c759a5c67c06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.541105] env[62923]: DEBUG nova.network.neutron [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.597580] env[62923]: DEBUG nova.network.neutron [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.678040] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Releasing lock "refresh_cache-f106b311-fc2f-4811-b7e0-d680de236b78" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.678499] env[62923]: DEBUG nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 635.678704] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 635.678979] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93e71e92-21ad-47fe-b628-479a84d6db5a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.688104] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a98dc8-b4a0-4955-aefc-ab03b8322e05 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.715938] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f106b311-fc2f-4811-b7e0-d680de236b78 could not be found. [ 635.716343] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 635.716628] env[62923]: INFO nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Took 0.04 seconds to destroy the instance on the hypervisor. [ 635.716892] env[62923]: DEBUG oslo.service.loopingcall [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 635.720862] env[62923]: DEBUG nova.compute.manager [-] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 635.720862] env[62923]: DEBUG nova.network.neutron [-] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 635.742300] env[62923]: DEBUG nova.network.neutron [-] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.940792] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97d5200-6f20-409d-8440-8eb54a132fbd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.948350] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031e6370-9eed-4663-910c-c7ce5c3368e4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.977735] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6299e6e-680d-4419-96f9-ecec5dacb868 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.985097] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaeaa507-84e2-4055-a414-cf7795987dd1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.998595] env[62923]: DEBUG nova.compute.provider_tree [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.101346] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Releasing lock "refresh_cache-a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.101593] env[62923]: DEBUG nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 636.101755] env[62923]: DEBUG nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 636.101918] env[62923]: DEBUG nova.network.neutron [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 636.132345] env[62923]: DEBUG nova.network.neutron [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 636.190770] env[62923]: INFO nova.scheduler.client.report [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Deleted allocations for instance 386ffe8a-a160-4dea-88e6-529219eaf99f [ 636.242992] env[62923]: DEBUG nova.network.neutron [-] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.502236] env[62923]: DEBUG nova.scheduler.client.report [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 636.635610] env[62923]: DEBUG nova.network.neutron [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.699949] env[62923]: DEBUG oslo_concurrency.lockutils [None req-700b925a-101b-4daa-be96-c26542ca335c tempest-AttachInterfacesUnderV243Test-1965228745 tempest-AttachInterfacesUnderV243Test-1965228745-project-member] Lock "386ffe8a-a160-4dea-88e6-529219eaf99f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.745s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.745038] env[62923]: INFO nova.compute.manager [-] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Took 1.02 seconds to deallocate network for instance. 
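[editor's note] The deallocation above is immediately followed by a claim abort: when a build fails, the compute manager returns the resources it had claimed, and every claim and abort serializes on the same "compute_resources" lock, which is why these entries keep reporting how long each caller waited for and held it. A minimal sketch of that pattern, assuming only oslo.concurrency's public synchronized decorator; the class and method bodies are hypothetical stand-ins, not Nova's actual resource tracker:

    from oslo_concurrency import lockutils

    class ResourceTrackerSketch:
        """Hypothetical stand-in for nova.compute.resource_tracker.ResourceTracker."""

        @lockutils.synchronized('compute_resources')
        def instance_claim(self, instance_uuid, node):
            # Reserve VCPU/MEMORY_MB/DISK_GB for the build on this node.
            return {'instance': instance_uuid, 'node': node}

        @lockutils.synchronized('compute_resources')
        def abort_instance_claim(self, instance_uuid, node):
            # Return the reserved resources when the build fails or the
            # instance is destroyed before it ever spawned.
            pass

With the decorator's defaults the lock is a single process-local semaphore, so concurrent builds in this one worker (pid 62923) simply queue on it; the "waited 14.470s" / "held 2.496s" figures later in this section are that queueing made visible.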
[ 636.749799] env[62923]: DEBUG nova.compute.claims [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 636.749978] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.008666] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.994s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.009235] env[62923]: ERROR nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b5de7c52-b6f9-458d-ad88-8dc8b5193462, please check neutron logs for more information. [ 637.009235] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Traceback (most recent call last): [ 637.009235] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 637.009235] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] self.driver.spawn(context, instance, image_meta, [ 637.009235] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 637.009235] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] self._vmops.spawn(context, instance, image_meta, injected_files, [ 637.009235] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 637.009235] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] vm_ref = self.build_virtual_machine(instance, [ 637.009235] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 637.009235] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] vif_infos = vmwarevif.get_vif_info(self._session, [ 637.009235] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 637.009688] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] for vif in network_info: [ 637.009688] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 637.009688] env[62923]: ERROR 
nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] return self._sync_wrapper(fn, *args, **kwargs) [ 637.009688] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 637.009688] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] self.wait() [ 637.009688] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 637.009688] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] self[:] = self._gt.wait() [ 637.009688] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 637.009688] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] return self._exit_event.wait() [ 637.009688] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 637.009688] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] result = hub.switch() [ 637.009688] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 637.009688] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] return self.greenlet.switch() [ 637.010839] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 637.010839] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] result = function(*args, **kwargs) [ 637.010839] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 637.010839] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] return func(*args, **kwargs) [ 637.010839] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 637.010839] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] raise e [ 637.010839] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 637.010839] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] nwinfo = self.network_api.allocate_for_instance( [ 637.010839] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 637.010839] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] created_port_ids = self._update_ports_for_instance( [ 637.010839] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 637.010839] env[62923]: ERROR nova.compute.manager 
[instance: 5a825d36-7563-4792-8f68-8814ec96cfde] with excutils.save_and_reraise_exception(): [ 637.010839] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 637.011323] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] self.force_reraise() [ 637.011323] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 637.011323] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] raise self.value [ 637.011323] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 637.011323] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] updated_port = self._update_port( [ 637.011323] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 637.011323] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] _ensure_no_port_binding_failure(port) [ 637.011323] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 637.011323] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] raise exception.PortBindingFailed(port_id=port['id']) [ 637.011323] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] nova.exception.PortBindingFailed: Binding failed for port b5de7c52-b6f9-458d-ad88-8dc8b5193462, please check neutron logs for more information. [ 637.011323] env[62923]: ERROR nova.compute.manager [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] [ 637.011611] env[62923]: DEBUG nova.compute.utils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Binding failed for port b5de7c52-b6f9-458d-ad88-8dc8b5193462, please check neutron logs for more information. 
{{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 637.011611] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.470s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.012629] env[62923]: INFO nova.compute.claims [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.015250] env[62923]: DEBUG nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Build of instance 5a825d36-7563-4792-8f68-8814ec96cfde was re-scheduled: Binding failed for port b5de7c52-b6f9-458d-ad88-8dc8b5193462, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 637.015678] env[62923]: DEBUG nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 637.015894] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Acquiring lock "refresh_cache-5a825d36-7563-4792-8f68-8814ec96cfde" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.016046] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Acquired lock "refresh_cache-5a825d36-7563-4792-8f68-8814ec96cfde" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.016197] env[62923]: DEBUG nova.network.neutron [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 637.138528] env[62923]: INFO nova.compute.manager [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] [instance: a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1] Took 1.04 seconds to deallocate network for instance. [ 637.203112] env[62923]: DEBUG nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 637.546393] env[62923]: DEBUG nova.network.neutron [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.726724] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.807992] env[62923]: DEBUG nova.network.neutron [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.167972] env[62923]: INFO nova.scheduler.client.report [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Deleted allocations for instance a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1 [ 638.311975] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Releasing lock "refresh_cache-5a825d36-7563-4792-8f68-8814ec96cfde" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.311975] env[62923]: DEBUG nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 638.311975] env[62923]: DEBUG nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 638.312171] env[62923]: DEBUG nova.network.neutron [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 638.332565] env[62923]: DEBUG nova.network.neutron [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 638.432255] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf94d12-75da-4bcd-b053-0c04058fd6fc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.440252] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efd8e7c-79aa-4816-9cb9-e39800e793b8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.472827] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81751a5-b825-4565-9f0f-86a12dcfc7eb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.480753] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cbe949-44ec-4dc5-9f1d-bb35e635cf90 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.498226] env[62923]: DEBUG nova.compute.provider_tree [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.675895] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95cfc0f0-67f3-45e3-929f-0ed843f7e3c3 tempest-DeleteServersAdminTestJSON-1531813717 tempest-DeleteServersAdminTestJSON-1531813717-project-member] Lock "a8e4d6d3-3908-4fe8-be33-5b70eb5a62a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.121s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.835312] env[62923]: DEBUG nova.network.neutron [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.001561] env[62923]: DEBUG nova.scheduler.client.report [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 639.178630] env[62923]: DEBUG nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 639.337891] env[62923]: INFO nova.compute.manager [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] [instance: 5a825d36-7563-4792-8f68-8814ec96cfde] Took 1.03 seconds to deallocate network for instance. [ 639.507291] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.507875] env[62923]: DEBUG nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 639.510743] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.980s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.512205] env[62923]: INFO nova.compute.claims [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 639.702157] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.017800] env[62923]: DEBUG nova.compute.utils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 640.023044] env[62923]: DEBUG nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 640.023044] env[62923]: DEBUG nova.network.neutron [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 640.095679] env[62923]: DEBUG nova.policy [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '170f26dc599849bd85572e44d8291ab6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '123afbeb445d4863a5997d903f21bbe0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 640.372103] env[62923]: INFO nova.scheduler.client.report [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Deleted allocations for instance 5a825d36-7563-4792-8f68-8814ec96cfde [ 640.521850] env[62923]: DEBUG nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 640.700715] env[62923]: DEBUG nova.network.neutron [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Successfully created port: f8d891a8-bd8d-4886-8e99-8c67585a6ab3 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 640.774771] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "c22aa745-0e4a-40fd-903f-edba79cbf88b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.775107] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "c22aa745-0e4a-40fd-903f-edba79cbf88b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.886043] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c2f752b3-1b98-477a-8f24-5817141f0b82 tempest-ServerGroupTestJSON-83206747 tempest-ServerGroupTestJSON-83206747-project-member] Lock "5a825d36-7563-4792-8f68-8814ec96cfde" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
72.946s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.948851] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d13224-0186-474f-a8c0-e9193e0df6f4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.954749] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b844c0c-adc3-4676-ab84-a650353cdd4e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.987682] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd396029-784d-469c-8b4a-811126622615 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.995439] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fccf9a6-7377-4ce0-8859-0a413e343194 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.011245] env[62923]: DEBUG nova.compute.provider_tree [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.392310] env[62923]: DEBUG nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 641.424567] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "71dd8aff-4500-4c91-8a46-2a398fd03560" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.424831] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "71dd8aff-4500-4c91-8a46-2a398fd03560" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.514050] env[62923]: DEBUG nova.scheduler.client.report [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 641.533126] env[62923]: DEBUG nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 641.570051] env[62923]: DEBUG nova.virt.hardware [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 641.570313] env[62923]: DEBUG nova.virt.hardware [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 641.570471] env[62923]: DEBUG nova.virt.hardware [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 641.570649] env[62923]: DEBUG nova.virt.hardware [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 641.570792] env[62923]: DEBUG nova.virt.hardware [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 641.570932] env[62923]: DEBUG nova.virt.hardware [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 641.571292] env[62923]: DEBUG nova.virt.hardware [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 641.573546] env[62923]: DEBUG nova.virt.hardware [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 641.573546] 
env[62923]: DEBUG nova.virt.hardware [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 641.573546] env[62923]: DEBUG nova.virt.hardware [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 641.573546] env[62923]: DEBUG nova.virt.hardware [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 641.573546] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51233de4-d0af-41ef-8a82-07ca0c06af40 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.581110] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0946d41b-4539-486e-b121-63c3fcb8a1f9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.911982] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "d65ce1f0-e9de-4fc8-828b-95aec5615f95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.912239] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "d65ce1f0-e9de-4fc8-828b-95aec5615f95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.913752] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.020690] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.510s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.021691] env[62923]: DEBUG nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 
tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 642.025839] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.399s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.027768] env[62923]: INFO nova.compute.claims [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.182483] env[62923]: DEBUG nova.compute.manager [req-6f0d86f7-3499-486a-9140-b072f8638444 req-1eb7978e-5f90-4725-b8ca-f711b9a8e85a service nova] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Received event network-changed-f8d891a8-bd8d-4886-8e99-8c67585a6ab3 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 642.182644] env[62923]: DEBUG nova.compute.manager [req-6f0d86f7-3499-486a-9140-b072f8638444 req-1eb7978e-5f90-4725-b8ca-f711b9a8e85a service nova] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Refreshing instance network info cache due to event network-changed-f8d891a8-bd8d-4886-8e99-8c67585a6ab3. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 642.183126] env[62923]: DEBUG oslo_concurrency.lockutils [req-6f0d86f7-3499-486a-9140-b072f8638444 req-1eb7978e-5f90-4725-b8ca-f711b9a8e85a service nova] Acquiring lock "refresh_cache-f247b499-0a04-47ae-98b0-cb3f7f088a62" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.183126] env[62923]: DEBUG oslo_concurrency.lockutils [req-6f0d86f7-3499-486a-9140-b072f8638444 req-1eb7978e-5f90-4725-b8ca-f711b9a8e85a service nova] Acquired lock "refresh_cache-f247b499-0a04-47ae-98b0-cb3f7f088a62" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.183233] env[62923]: DEBUG nova.network.neutron [req-6f0d86f7-3499-486a-9140-b072f8638444 req-1eb7978e-5f90-4725-b8ca-f711b9a8e85a service nova] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Refreshing network info cache for port f8d891a8-bd8d-4886-8e99-8c67585a6ab3 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 642.275189] env[62923]: ERROR nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f8d891a8-bd8d-4886-8e99-8c67585a6ab3, please check neutron logs for more information. 
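[editor's note] The traceback that follows originates in _update_ports_for_instance, which wraps the failing port update in oslo.utils' save_and_reraise_exception so cleanup can run while the original error still propagates; that is what the __exit__ and force_reraise frames in the trace are. A minimal sketch of the pattern; PortBindingFailed, _update_port, and _rollback here are hypothetical stand-ins, not Nova's code:

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        pass

    def _update_port(port):
        # Stand-in for the Neutron port update that fails in the trace.
        raise PortBindingFailed(f"Binding failed for port {port}")

    def _rollback(ports):
        pass  # hypothetical cleanup of any ports already created

    def update_ports_for_instance(ports):
        for port in ports:
            try:
                _update_port(port)
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup runs here; on exit the context manager
                    # re-raises the original exception (force_reraise).
                    _rollback(ports)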
[ 642.275189] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 642.275189] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.275189] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 642.275189] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 642.275189] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 642.275189] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 642.275189] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 642.275189] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.275189] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 642.275189] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.275189] env[62923]: ERROR nova.compute.manager raise self.value [ 642.275189] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 642.275189] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 642.275189] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.275189] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 642.275631] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.275631] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 642.275631] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f8d891a8-bd8d-4886-8e99-8c67585a6ab3, please check neutron logs for more information. 
[ 642.275631] env[62923]: ERROR nova.compute.manager [ 642.275631] env[62923]: Traceback (most recent call last): [ 642.275631] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 642.275631] env[62923]: listener.cb(fileno) [ 642.275631] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 642.275631] env[62923]: result = function(*args, **kwargs) [ 642.275631] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 642.275631] env[62923]: return func(*args, **kwargs) [ 642.275631] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 642.275631] env[62923]: raise e [ 642.275631] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.275631] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 642.275631] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 642.275631] env[62923]: created_port_ids = self._update_ports_for_instance( [ 642.275631] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 642.275631] env[62923]: with excutils.save_and_reraise_exception(): [ 642.275631] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.275631] env[62923]: self.force_reraise() [ 642.275631] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.275631] env[62923]: raise self.value [ 642.275631] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 642.275631] env[62923]: updated_port = self._update_port( [ 642.275631] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.275631] env[62923]: _ensure_no_port_binding_failure(port) [ 642.275631] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.275631] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 642.276678] env[62923]: nova.exception.PortBindingFailed: Binding failed for port f8d891a8-bd8d-4886-8e99-8c67585a6ab3, please check neutron logs for more information. [ 642.276678] env[62923]: Removing descriptor: 21 [ 642.276678] env[62923]: ERROR nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f8d891a8-bd8d-4886-8e99-8c67585a6ab3, please check neutron logs for more information. 
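[editor's note] The second copy of the trace, below, shows where the failure actually surfaces: network allocation runs on a background greenthread, and the exception is only re-raised once the VMware driver consumes network_info (the model.py __iter__ -> _sync_wrapper -> wait -> GreenThread.wait frames). A minimal sketch of that deferred-failure behavior using plain eventlet; the function name and exception type are illustrative:

    import eventlet

    def allocate_network():
        # Stand-in for _allocate_network_async raising PortBindingFailed.
        raise RuntimeError("Binding failed")

    gt = eventlet.spawn(allocate_network)   # returns immediately; no error yet
    # ... the build carries on (block device mappings, spawn setup) ...
    try:
        network_info = gt.wait()            # the exception re-raises here,
    except RuntimeError as exc:             # at consumption time, not at spawn
        print(f"surfaced at consumption: {exc}")

This is why the log shows "Allocating IP information in the background" long before the ERROR lines: the build proceeds until the driver first iterates the network info.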
[ 642.276678] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Traceback (most recent call last): [ 642.276678] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 642.276678] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] yield resources [ 642.276678] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 642.276678] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] self.driver.spawn(context, instance, image_meta, [ 642.276678] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 642.276678] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] self._vmops.spawn(context, instance, image_meta, injected_files, [ 642.276678] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 642.276678] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] vm_ref = self.build_virtual_machine(instance, [ 642.277028] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 642.277028] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] vif_infos = vmwarevif.get_vif_info(self._session, [ 642.277028] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 642.277028] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] for vif in network_info: [ 642.277028] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 642.277028] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] return self._sync_wrapper(fn, *args, **kwargs) [ 642.277028] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 642.277028] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] self.wait() [ 642.277028] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 642.277028] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] self[:] = self._gt.wait() [ 642.277028] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 642.277028] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] return self._exit_event.wait() [ 642.277028] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 642.277468] env[62923]: ERROR 
nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] result = hub.switch() [ 642.277468] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 642.277468] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] return self.greenlet.switch() [ 642.277468] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 642.277468] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] result = function(*args, **kwargs) [ 642.277468] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 642.277468] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] return func(*args, **kwargs) [ 642.277468] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 642.277468] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] raise e [ 642.277468] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.277468] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] nwinfo = self.network_api.allocate_for_instance( [ 642.277468] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 642.277468] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] created_port_ids = self._update_ports_for_instance( [ 642.277822] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 642.277822] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] with excutils.save_and_reraise_exception(): [ 642.277822] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.277822] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] self.force_reraise() [ 642.277822] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.277822] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] raise self.value [ 642.277822] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 642.277822] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] updated_port = self._update_port( [ 642.277822] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.277822] 
env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] _ensure_no_port_binding_failure(port) [ 642.277822] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.277822] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] raise exception.PortBindingFailed(port_id=port['id']) [ 642.278186] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] nova.exception.PortBindingFailed: Binding failed for port f8d891a8-bd8d-4886-8e99-8c67585a6ab3, please check neutron logs for more information. [ 642.278186] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] [ 642.278186] env[62923]: INFO nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Terminating instance [ 642.280478] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Acquiring lock "refresh_cache-f247b499-0a04-47ae-98b0-cb3f7f088a62" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.538631] env[62923]: DEBUG nova.compute.utils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 642.540342] env[62923]: DEBUG nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 642.540538] env[62923]: DEBUG nova.network.neutron [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 642.592604] env[62923]: DEBUG nova.policy [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e8b960aefcac422f9c20b52d4411e526', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f3881e5599c4e0b9f12587d11ba0cb2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 642.719090] env[62923]: DEBUG nova.network.neutron [req-6f0d86f7-3499-486a-9140-b072f8638444 req-1eb7978e-5f90-4725-b8ca-f711b9a8e85a service nova] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 642.828170] env[62923]: DEBUG nova.network.neutron [req-6f0d86f7-3499-486a-9140-b072f8638444 req-1eb7978e-5f90-4725-b8ca-f711b9a8e85a service nova] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.973316] env[62923]: DEBUG nova.network.neutron [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Successfully created port: acc24265-4fb3-4bb2-a610-f5bdb7a2f198 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 643.045150] env[62923]: DEBUG nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 643.330735] env[62923]: DEBUG oslo_concurrency.lockutils [req-6f0d86f7-3499-486a-9140-b072f8638444 req-1eb7978e-5f90-4725-b8ca-f711b9a8e85a service nova] Releasing lock "refresh_cache-f247b499-0a04-47ae-98b0-cb3f7f088a62" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.331150] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Acquired lock "refresh_cache-f247b499-0a04-47ae-98b0-cb3f7f088a62" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.331336] env[62923]: DEBUG nova.network.neutron [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 643.489475] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2948cc-a191-4dd4-8dfb-3b12d9c80447 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.498842] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8428341a-c787-44c4-a178-fdb7197e5649 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.532660] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82220f6-d5c9-436d-9480-a55087c91841 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.540234] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3308b0c3-bd98-44d0-a7b2-4f54d716e8e1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.557639] env[62923]: DEBUG nova.compute.provider_tree [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.871694] env[62923]: DEBUG nova.network.neutron [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 644.028821] env[62923]: DEBUG nova.network.neutron [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.060700] env[62923]: DEBUG nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 644.064665] env[62923]: DEBUG nova.scheduler.client.report [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 644.106276] env[62923]: DEBUG nova.virt.hardware [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 644.106276] env[62923]: DEBUG nova.virt.hardware [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 644.106451] env[62923]: DEBUG nova.virt.hardware [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 644.106521] env[62923]: DEBUG nova.virt.hardware [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217
tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 644.106666] env[62923]: DEBUG nova.virt.hardware [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 644.106803] env[62923]: DEBUG nova.virt.hardware [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 644.106996] env[62923]: DEBUG nova.virt.hardware [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 644.107291] env[62923]: DEBUG nova.virt.hardware [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 644.107453] env[62923]: DEBUG nova.virt.hardware [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 644.107607] env[62923]: DEBUG nova.virt.hardware [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 644.107769] env[62923]: DEBUG nova.virt.hardware [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 644.112028] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024d9c7a-80f6-4b10-ab98-a2bc081e3662 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.118953] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a8f8ee-649b-4234-a448-fa6269becc73 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.216165] env[62923]: DEBUG nova.compute.manager [req-3a4656f4-74de-4fe8-a33e-eb05642f1fac req-6b804555-daeb-4b8c-bbc5-6228c4327491 service nova] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Received event network-vif-deleted-f8d891a8-bd8d-4886-8e99-8c67585a6ab3 {{(pid=62923) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 644.532095] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Releasing lock "refresh_cache-f247b499-0a04-47ae-98b0-cb3f7f088a62" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.532546] env[62923]: DEBUG nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 644.532736] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 644.533072] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ed4ac20-2b8d-4edb-aaba-ac1cde90fda1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.542931] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8e8afe-c8ec-4b15-a6e8-257f487f5950 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.565462] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f247b499-0a04-47ae-98b0-cb3f7f088a62 could not be found. [ 644.565706] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 644.565889] env[62923]: INFO nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Took 0.03 seconds to destroy the instance on the hypervisor. [ 644.566148] env[62923]: DEBUG oslo.service.loopingcall [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 644.566387] env[62923]: DEBUG nova.compute.manager [-] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 644.566482] env[62923]: DEBUG nova.network.neutron [-] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 644.571998] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.546s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.572487] env[62923]: DEBUG nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 644.575162] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 16.205s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.620301] env[62923]: DEBUG nova.network.neutron [-] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 645.079212] env[62923]: DEBUG nova.compute.utils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 645.089586] env[62923]: DEBUG nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 645.089764] env[62923]: DEBUG nova.network.neutron [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 645.125127] env[62923]: DEBUG nova.network.neutron [-] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.210448] env[62923]: DEBUG nova.policy [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c78663339634954800ce721b3bd24f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e70f424c9eed4a659b19c858ff277027', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 645.237030] env[62923]: ERROR nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port acc24265-4fb3-4bb2-a610-f5bdb7a2f198, please check neutron logs for more information. 
[ 645.237030] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 645.237030] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.237030] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 645.237030] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 645.237030] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 645.237030] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 645.237030] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 645.237030] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.237030] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 645.237030] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.237030] env[62923]: ERROR nova.compute.manager raise self.value [ 645.237030] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 645.237030] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 645.237030] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.237030] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 645.237492] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.237492] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 645.237492] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port acc24265-4fb3-4bb2-a610-f5bdb7a2f198, please check neutron logs for more information. 
[ 645.237492] env[62923]: ERROR nova.compute.manager [ 645.237492] env[62923]: Traceback (most recent call last): [ 645.237492] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 645.237492] env[62923]: listener.cb(fileno) [ 645.237492] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.237492] env[62923]: result = function(*args, **kwargs) [ 645.237492] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 645.237492] env[62923]: return func(*args, **kwargs) [ 645.237492] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 645.237492] env[62923]: raise e [ 645.237492] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.237492] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 645.237492] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 645.237492] env[62923]: created_port_ids = self._update_ports_for_instance( [ 645.237492] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 645.237492] env[62923]: with excutils.save_and_reraise_exception(): [ 645.237492] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.237492] env[62923]: self.force_reraise() [ 645.237492] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.237492] env[62923]: raise self.value [ 645.237492] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 645.237492] env[62923]: updated_port = self._update_port( [ 645.237492] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.237492] env[62923]: _ensure_no_port_binding_failure(port) [ 645.237492] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.237492] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 645.239902] env[62923]: nova.exception.PortBindingFailed: Binding failed for port acc24265-4fb3-4bb2-a610-f5bdb7a2f198, please check neutron logs for more information. [ 645.239902] env[62923]: Removing descriptor: 21 [ 645.239902] env[62923]: ERROR nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port acc24265-4fb3-4bb2-a610-f5bdb7a2f198, please check neutron logs for more information. 
[ 645.239902] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Traceback (most recent call last): [ 645.239902] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 645.239902] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] yield resources [ 645.239902] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 645.239902] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] self.driver.spawn(context, instance, image_meta, [ 645.239902] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 645.239902] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 645.239902] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 645.239902] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] vm_ref = self.build_virtual_machine(instance, [ 645.240682] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 645.240682] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] vif_infos = vmwarevif.get_vif_info(self._session, [ 645.240682] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 645.240682] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] for vif in network_info: [ 645.240682] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 645.240682] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] return self._sync_wrapper(fn, *args, **kwargs) [ 645.240682] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 645.240682] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] self.wait() [ 645.240682] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 645.240682] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] self[:] = self._gt.wait() [ 645.240682] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 645.240682] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] return self._exit_event.wait() [ 645.240682] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 645.241179] env[62923]: ERROR 
nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] result = hub.switch() [ 645.241179] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 645.241179] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] return self.greenlet.switch() [ 645.241179] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.241179] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] result = function(*args, **kwargs) [ 645.241179] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 645.241179] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] return func(*args, **kwargs) [ 645.241179] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 645.241179] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] raise e [ 645.241179] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.241179] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] nwinfo = self.network_api.allocate_for_instance( [ 645.241179] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 645.241179] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] created_port_ids = self._update_ports_for_instance( [ 645.241837] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 645.241837] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] with excutils.save_and_reraise_exception(): [ 645.241837] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.241837] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] self.force_reraise() [ 645.241837] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.241837] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] raise self.value [ 645.241837] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 645.241837] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] updated_port = self._update_port( [ 645.241837] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.241837] 
env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] _ensure_no_port_binding_failure(port) [ 645.241837] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.241837] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] raise exception.PortBindingFailed(port_id=port['id']) [ 645.243105] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] nova.exception.PortBindingFailed: Binding failed for port acc24265-4fb3-4bb2-a610-f5bdb7a2f198, please check neutron logs for more information. [ 645.243105] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] [ 645.243105] env[62923]: INFO nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Terminating instance [ 645.243105] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Acquiring lock "refresh_cache-bb28249e-a9d3-4d7d-bd05-128f1110dbca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.243105] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Acquired lock "refresh_cache-bb28249e-a9d3-4d7d-bd05-128f1110dbca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.243105] env[62923]: DEBUG nova.network.neutron [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 645.590424] env[62923]: DEBUG nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 645.629593] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 645.629593] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance f106b311-fc2f-4811-b7e0-d680de236b78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 645.629593] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance f247b499-0a04-47ae-98b0-cb3f7f088a62 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 645.629593] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance bb28249e-a9d3-4d7d-bd05-128f1110dbca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 645.629801] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 0c569bd2-7622-4285-9439-209a88f2e84d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 645.631807] env[62923]: INFO nova.compute.manager [-] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Took 1.07 seconds to deallocate network for instance. [ 645.635793] env[62923]: DEBUG nova.compute.claims [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 645.637190] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.734311] env[62923]: DEBUG nova.network.neutron [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Successfully created port: 3e76600a-59fc-4233-bb90-a265aa02f9f8 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 645.953994] env[62923]: DEBUG nova.network.neutron [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 646.133652] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 646.168411] env[62923]: DEBUG nova.network.neutron [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.268527] env[62923]: DEBUG nova.compute.manager [req-7a7fb050-20e3-49ce-be16-53af6a7764c5 req-1f85a7ad-7c5d-4b3f-8893-ed2ff89ecfc1 service nova] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Received event network-changed-acc24265-4fb3-4bb2-a610-f5bdb7a2f198 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 646.268704] env[62923]: DEBUG nova.compute.manager [req-7a7fb050-20e3-49ce-be16-53af6a7764c5 req-1f85a7ad-7c5d-4b3f-8893-ed2ff89ecfc1 service nova] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Refreshing instance network info cache due to event network-changed-acc24265-4fb3-4bb2-a610-f5bdb7a2f198. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 646.268884] env[62923]: DEBUG oslo_concurrency.lockutils [req-7a7fb050-20e3-49ce-be16-53af6a7764c5 req-1f85a7ad-7c5d-4b3f-8893-ed2ff89ecfc1 service nova] Acquiring lock "refresh_cache-bb28249e-a9d3-4d7d-bd05-128f1110dbca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.608877] env[62923]: DEBUG nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 646.640179] env[62923]: DEBUG nova.virt.hardware [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 646.640465] env[62923]: DEBUG nova.virt.hardware [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 646.640622] env[62923]: DEBUG nova.virt.hardware [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 646.640798] env[62923]: DEBUG nova.virt.hardware [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 646.640939] env[62923]: DEBUG nova.virt.hardware [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 646.641591] env[62923]: DEBUG nova.virt.hardware [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 646.641942] env[62923]: DEBUG nova.virt.hardware [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 646.642136] env[62923]: DEBUG nova.virt.hardware [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923)
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 646.642310] env[62923]: DEBUG nova.virt.hardware [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 646.642474] env[62923]: DEBUG nova.virt.hardware [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 646.642642] env[62923]: DEBUG nova.virt.hardware [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 646.643390] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 7ab06c90-5d19-43fa-b91b-7d17f85d3258 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 646.645173] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a8980f-3bd1-426d-bb09-9e6065663684 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.655919] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800965e0-8568-48e6-9a46-961656dc487d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.672855] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Releasing lock "refresh_cache-bb28249e-a9d3-4d7d-bd05-128f1110dbca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.673430] env[62923]: DEBUG nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 646.673619] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 646.674128] env[62923]: DEBUG oslo_concurrency.lockutils [req-7a7fb050-20e3-49ce-be16-53af6a7764c5 req-1f85a7ad-7c5d-4b3f-8893-ed2ff89ecfc1 service nova] Acquired lock "refresh_cache-bb28249e-a9d3-4d7d-bd05-128f1110dbca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.674304] env[62923]: DEBUG nova.network.neutron [req-7a7fb050-20e3-49ce-be16-53af6a7764c5 req-1f85a7ad-7c5d-4b3f-8893-ed2ff89ecfc1 service nova] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Refreshing network info cache for port acc24265-4fb3-4bb2-a610-f5bdb7a2f198 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 646.675315] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c36eb64f-8c92-42a1-8d3d-f3a93ef5f4bc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.684084] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ede818-0a76-4a23-8938-dd3b37434169 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.713636] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bb28249e-a9d3-4d7d-bd05-128f1110dbca could not be found. [ 646.713636] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 646.713636] env[62923]: INFO nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Took 0.04 seconds to destroy the instance on the hypervisor. [ 646.713636] env[62923]: DEBUG oslo.service.loopingcall [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 646.713636] env[62923]: DEBUG nova.compute.manager [-] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 646.713636] env[62923]: DEBUG nova.network.neutron [-] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 646.735965] env[62923]: DEBUG nova.network.neutron [-] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.152104] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 7559e4b7-1cfe-438e-8a14-a964c1a76d52 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 647.200483] env[62923]: DEBUG nova.network.neutron [req-7a7fb050-20e3-49ce-be16-53af6a7764c5 req-1f85a7ad-7c5d-4b3f-8893-ed2ff89ecfc1 service nova] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.241845] env[62923]: DEBUG nova.network.neutron [-] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.371019] env[62923]: DEBUG nova.network.neutron [req-7a7fb050-20e3-49ce-be16-53af6a7764c5 req-1f85a7ad-7c5d-4b3f-8893-ed2ff89ecfc1 service nova] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.451644] env[62923]: ERROR nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3e76600a-59fc-4233-bb90-a265aa02f9f8, please check neutron logs for more information. 
[ 647.451644] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 647.451644] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 647.451644] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 647.451644] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 647.451644] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 647.451644] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 647.451644] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 647.451644] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.451644] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 647.451644] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.451644] env[62923]: ERROR nova.compute.manager raise self.value [ 647.451644] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 647.451644] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 647.451644] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.451644] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 647.452070] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.452070] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 647.452070] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3e76600a-59fc-4233-bb90-a265aa02f9f8, please check neutron logs for more information. 
[ 647.452070] env[62923]: ERROR nova.compute.manager [ 647.452070] env[62923]: Traceback (most recent call last): [ 647.452070] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 647.452070] env[62923]: listener.cb(fileno) [ 647.452070] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 647.452070] env[62923]: result = function(*args, **kwargs) [ 647.452070] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 647.452070] env[62923]: return func(*args, **kwargs) [ 647.452070] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 647.452070] env[62923]: raise e [ 647.452070] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 647.452070] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 647.452070] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 647.452070] env[62923]: created_port_ids = self._update_ports_for_instance( [ 647.452070] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 647.452070] env[62923]: with excutils.save_and_reraise_exception(): [ 647.452070] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.452070] env[62923]: self.force_reraise() [ 647.452070] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.452070] env[62923]: raise self.value [ 647.452070] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 647.452070] env[62923]: updated_port = self._update_port( [ 647.452070] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.452070] env[62923]: _ensure_no_port_binding_failure(port) [ 647.452070] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.452070] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 647.453814] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 3e76600a-59fc-4233-bb90-a265aa02f9f8, please check neutron logs for more information. [ 647.453814] env[62923]: Removing descriptor: 17 [ 647.453814] env[62923]: ERROR nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3e76600a-59fc-4233-bb90-a265aa02f9f8, please check neutron logs for more information. 
[ 647.453814] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Traceback (most recent call last): [ 647.453814] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 647.453814] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] yield resources [ 647.453814] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 647.453814] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] self.driver.spawn(context, instance, image_meta, [ 647.453814] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 647.453814] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 647.453814] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 647.453814] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] vm_ref = self.build_virtual_machine(instance, [ 647.454156] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 647.454156] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] vif_infos = vmwarevif.get_vif_info(self._session, [ 647.454156] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 647.454156] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] for vif in network_info: [ 647.454156] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 647.454156] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] return self._sync_wrapper(fn, *args, **kwargs) [ 647.454156] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 647.454156] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] self.wait() [ 647.454156] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 647.454156] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] self[:] = self._gt.wait() [ 647.454156] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 647.454156] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] return self._exit_event.wait() [ 647.454156] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 647.454466] env[62923]: ERROR 
nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] result = hub.switch() [ 647.454466] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 647.454466] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] return self.greenlet.switch() [ 647.454466] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 647.454466] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] result = function(*args, **kwargs) [ 647.454466] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 647.454466] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] return func(*args, **kwargs) [ 647.454466] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 647.454466] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] raise e [ 647.454466] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 647.454466] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] nwinfo = self.network_api.allocate_for_instance( [ 647.454466] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 647.454466] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] created_port_ids = self._update_ports_for_instance( [ 647.455904] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 647.455904] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] with excutils.save_and_reraise_exception(): [ 647.455904] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.455904] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] self.force_reraise() [ 647.455904] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.455904] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] raise self.value [ 647.455904] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 647.455904] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] updated_port = self._update_port( [ 647.455904] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.455904] 
env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] _ensure_no_port_binding_failure(port) [ 647.455904] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.455904] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] raise exception.PortBindingFailed(port_id=port['id']) [ 647.456204] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] nova.exception.PortBindingFailed: Binding failed for port 3e76600a-59fc-4233-bb90-a265aa02f9f8, please check neutron logs for more information. [ 647.456204] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] [ 647.456204] env[62923]: INFO nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Terminating instance [ 647.456204] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Acquiring lock "refresh_cache-0c569bd2-7622-4285-9439-209a88f2e84d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.456204] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Acquired lock "refresh_cache-0c569bd2-7622-4285-9439-209a88f2e84d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.456204] env[62923]: DEBUG nova.network.neutron [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 647.655614] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 9654e9d5-a809-4875-99bb-fd99d7a7fbd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 647.746913] env[62923]: INFO nova.compute.manager [-] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Took 1.03 seconds to deallocate network for instance. 
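The root failure in the dump above is the check at nova/network/neutron.py:294: Neutron accepted the port but could not bind it to this compute host, which it signals by setting the port's binding:vif_type rather than failing the API call, and Nova converts that into PortBindingFailed; the place to look is therefore the neutron server and agent logs (commonly a dead L2 agent or a physnet/mechanism-driver mismatch on the host). A simplified sketch of the check, consistent with the frames above (trimmed from the nova source; the exception class here is a local stand-in):

```python
# Simplified sketch of _ensure_no_port_binding_failure() as it appears in
# the frames above.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)


def ensure_no_port_binding_failure(port):
    # Neutron reports a binding it could not complete by setting
    # binding:vif_type on the port, not by returning an error.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])
```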
[ 647.749752] env[62923]: DEBUG nova.compute.claims [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 647.749752] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.873173] env[62923]: DEBUG oslo_concurrency.lockutils [req-7a7fb050-20e3-49ce-be16-53af6a7764c5 req-1f85a7ad-7c5d-4b3f-8893-ed2ff89ecfc1 service nova] Releasing lock "refresh_cache-bb28249e-a9d3-4d7d-bd05-128f1110dbca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.873641] env[62923]: DEBUG nova.compute.manager [req-7a7fb050-20e3-49ce-be16-53af6a7764c5 req-1f85a7ad-7c5d-4b3f-8893-ed2ff89ecfc1 service nova] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Received event network-vif-deleted-acc24265-4fb3-4bb2-a610-f5bdb7a2f198 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 647.972133] env[62923]: DEBUG nova.network.neutron [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.036017] env[62923]: DEBUG nova.network.neutron [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.159374] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 91043784-2e4a-4fa4-87de-1c45971e64c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.317894] env[62923]: DEBUG nova.compute.manager [req-52493f6c-081b-44e8-b133-bdaf9a9e1215 req-fe850aa4-6745-4b47-9191-1763f30acd19 service nova] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Received event network-changed-3e76600a-59fc-4233-bb90-a265aa02f9f8 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 648.318107] env[62923]: DEBUG nova.compute.manager [req-52493f6c-081b-44e8-b133-bdaf9a9e1215 req-fe850aa4-6745-4b47-9191-1763f30acd19 service nova] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Refreshing instance network info cache due to event network-changed-3e76600a-59fc-4233-bb90-a265aa02f9f8. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 648.318295] env[62923]: DEBUG oslo_concurrency.lockutils [req-52493f6c-081b-44e8-b133-bdaf9a9e1215 req-fe850aa4-6745-4b47-9191-1763f30acd19 service nova] Acquiring lock "refresh_cache-0c569bd2-7622-4285-9439-209a88f2e84d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.539393] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Releasing lock "refresh_cache-0c569bd2-7622-4285-9439-209a88f2e84d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.539393] env[62923]: DEBUG nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 648.539393] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 648.539692] env[62923]: DEBUG oslo_concurrency.lockutils [req-52493f6c-081b-44e8-b133-bdaf9a9e1215 req-fe850aa4-6745-4b47-9191-1763f30acd19 service nova] Acquired lock "refresh_cache-0c569bd2-7622-4285-9439-209a88f2e84d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.539878] env[62923]: DEBUG nova.network.neutron [req-52493f6c-081b-44e8-b133-bdaf9a9e1215 req-fe850aa4-6745-4b47-9191-1763f30acd19 service nova] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Refreshing network info cache for port 3e76600a-59fc-4233-bb90-a265aa02f9f8 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 648.541776] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff00670c-79dc-4c8e-905b-ccb6e080bf03 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.554239] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0d68ed-4855-4952-91ea-4d43e4e234aa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.581744] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0c569bd2-7622-4285-9439-209a88f2e84d could not be found. 
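The InstanceNotFound warning above is benign during this teardown: the spawn failed before any VM was created in vCenter, so when the compute manager asks the driver to destroy the instance, the UUID lookup (the SearchIndex.FindAllByUuid call a few records earlier) comes back empty and vmops proceeds as if the instance were already gone, as the "Instance destroyed ... Took 0.04 seconds" records below confirm. A sketch of that tolerant-destroy shape, with hypothetical helpers in place of the real vmops calls:

```python
# Illustrative sketch (not the nova API): destroy must succeed even when
# the backend never created the VM, so a failed UUID lookup is swallowed.
class InstanceNotFound(Exception):
    pass


def find_vm_by_uuid(session, uuid):
    # Stand-in for the SearchIndex.FindAllByUuid lookup in the log above;
    # raises because no VM was ever created for this instance.
    raise InstanceNotFound(uuid)


def power_off_and_delete(session, vm_ref):
    # Hypothetical happy path; unreachable in this demo.
    pass


def destroy(session, instance_uuid):
    try:
        vm_ref = find_vm_by_uuid(session, instance_uuid)
    except InstanceNotFound:
        # Nothing exists on the hypervisor: warn and fall through so the
        # compute manager can still deallocate networking and the claim.
        print("Instance does not exist on backend: %s" % instance_uuid)
        return
    power_off_and_delete(session, vm_ref)
```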
[ 648.581744] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 648.581744] env[62923]: INFO nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 648.581744] env[62923]: DEBUG oslo.service.loopingcall [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 648.581744] env[62923]: DEBUG nova.compute.manager [-] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 648.581744] env[62923]: DEBUG nova.network.neutron [-] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 648.607283] env[62923]: DEBUG nova.network.neutron [-] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.662414] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 13e71116-cb20-4fc5-8ceb-3a6098bae438 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 649.065273] env[62923]: DEBUG nova.network.neutron [req-52493f6c-081b-44e8-b133-bdaf9a9e1215 req-fe850aa4-6745-4b47-9191-1763f30acd19 service nova] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.107178] env[62923]: DEBUG nova.network.neutron [-] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.158604] env[62923]: DEBUG nova.network.neutron [req-52493f6c-081b-44e8-b133-bdaf9a9e1215 req-fe850aa4-6745-4b47-9191-1763f30acd19 service nova] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.166720] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance c11d0dcc-e5aa-4d7c-bba5-2853622dde44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start.
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 649.609430] env[62923]: INFO nova.compute.manager [-] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Took 1.03 seconds to deallocate network for instance. [ 649.611940] env[62923]: DEBUG nova.compute.claims [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 649.612139] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.661955] env[62923]: DEBUG oslo_concurrency.lockutils [req-52493f6c-081b-44e8-b133-bdaf9a9e1215 req-fe850aa4-6745-4b47-9191-1763f30acd19 service nova] Releasing lock "refresh_cache-0c569bd2-7622-4285-9439-209a88f2e84d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.662268] env[62923]: DEBUG nova.compute.manager [req-52493f6c-081b-44e8-b133-bdaf9a9e1215 req-fe850aa4-6745-4b47-9191-1763f30acd19 service nova] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Received event network-vif-deleted-3e76600a-59fc-4233-bb90-a265aa02f9f8 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 649.671232] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 81c87881-bf63-4622-a0cb-6e38680a8f14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.039169] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquiring lock "f76d2304-7a4e-4f18-80de-ecb0b67bec28" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.039407] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "f76d2304-7a4e-4f18-80de-ecb0b67bec28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.174393] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance d45fe9ea-8538-47da-b8dd-c67f8863a812 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 650.677157] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 325e8102-c129-40f4-b61d-1976d2a1fe42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 651.180234] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance fa7295fe-b893-455b-9d4b-4013c187c288 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 651.682709] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 627ebcab-90f9-4ebe-baf9-52fe808ec8c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.186076] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance c370e9a9-3c09-418c-b2fc-e75323298518 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.690147] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance f853c572-ad40-4cce-83d4-d5f11b42c37f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 653.193360] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance b48be393-189f-4093-b079-fe555192e7ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 653.696213] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance a701b2b9-10df-4ba3-8b78-b6b486d8f1db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 654.200543] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance fcee63c5-eaa3-4d8c-a612-9c30087433e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 654.703912] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 9b334c72-11f5-4165-a350-09fe5487a9a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 655.207455] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance bf0222ef-b86f-4d85-ab75-96661b90a4b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 655.710845] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance ac14f710-41c0-429c-92a3-46acceace3fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 656.214300] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 78daba16-0c0f-4db6-bde1-70d960a6e7ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 656.717599] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 7831bfb8-b336-4338-923f-c759a5c67c06 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 657.221360] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance c22aa745-0e4a-40fd-903f-edba79cbf88b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 657.723853] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 71dd8aff-4500-4c91-8a46-2a398fd03560 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 658.228217] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance d65ce1f0-e9de-4fc8-828b-95aec5615f95 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 658.228449] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 658.228597] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 658.561044] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718dcf5c-eb77-4b1c-a46d-6c9351608efa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.568828] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68253f88-b8e4-456c-9e0c-088e591772d9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.599340] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d1b3299-01e3-4404-bc85-abc6d97aa160 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.608444] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f798f3e2-84a3-41cc-9cbd-f51990fb309d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.622187] env[62923]: DEBUG nova.compute.provider_tree [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.125861] env[62923]: DEBUG nova.scheduler.client.report [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 659.630504] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 659.630749] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 15.056s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.631029] env[62923]: DEBUG oslo_concurrency.lockutils [None 
req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.984s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.632552] env[62923]: INFO nova.compute.claims [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.979023] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a932bcd-844a-4500-8496-0c67fabd4f21 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.986433] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cddb6c-3f47-4eb6-bf7e-2bfde701e4ba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.016086] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1593c139-b62f-40b1-b404-e49507a78c29 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.022635] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c83119c-8d78-4a59-bfb3-5ad1a587e24a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.035052] env[62923]: DEBUG nova.compute.provider_tree [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.539060] env[62923]: DEBUG nova.scheduler.client.report [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 662.044875] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.045434] env[62923]: DEBUG nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 
ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 662.048108] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.450s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.049531] env[62923]: INFO nova.compute.claims [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.554761] env[62923]: DEBUG nova.compute.utils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 662.559033] env[62923]: DEBUG nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 662.559033] env[62923]: DEBUG nova.network.neutron [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 662.595231] env[62923]: DEBUG nova.policy [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68e62d519b19448c8cac7f1b2e55a087', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3b09245b63144e9bbcb2262aef33a21', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 662.894867] env[62923]: DEBUG nova.network.neutron [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Successfully created port: 6c8e867c-65d6-4982-a2d7-2c382f37782f {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 663.061816] env[62923]: DEBUG nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 663.442848] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240b9700-691e-42c9-9381-9c61e43153ba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.450824] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efec47ab-09d8-46d2-aefa-8b3fadbdf7f1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.494883] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b08472-c9f0-435f-9fd9-661944402932 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.502116] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ab59e4-0f68-499d-b492-430d3ebb3331 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.515594] env[62923]: DEBUG nova.compute.provider_tree [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.858043] env[62923]: DEBUG nova.compute.manager [req-146a4637-14d5-4829-881f-f9b1bc4ca9d6 req-275f5881-663c-4a83-8103-fab5542ace43 service nova] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Received event network-changed-6c8e867c-65d6-4982-a2d7-2c382f37782f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 663.858043] env[62923]: DEBUG nova.compute.manager [req-146a4637-14d5-4829-881f-f9b1bc4ca9d6 req-275f5881-663c-4a83-8103-fab5542ace43 service nova] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Refreshing instance network info cache due to event network-changed-6c8e867c-65d6-4982-a2d7-2c382f37782f. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 663.858199] env[62923]: DEBUG oslo_concurrency.lockutils [req-146a4637-14d5-4829-881f-f9b1bc4ca9d6 req-275f5881-663c-4a83-8103-fab5542ace43 service nova] Acquiring lock "refresh_cache-ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.858344] env[62923]: DEBUG oslo_concurrency.lockutils [req-146a4637-14d5-4829-881f-f9b1bc4ca9d6 req-275f5881-663c-4a83-8103-fab5542ace43 service nova] Acquired lock "refresh_cache-ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.858501] env[62923]: DEBUG nova.network.neutron [req-146a4637-14d5-4829-881f-f9b1bc4ca9d6 req-275f5881-663c-4a83-8103-fab5542ace43 service nova] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Refreshing network info cache for port 6c8e867c-65d6-4982-a2d7-2c382f37782f {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 664.022446] env[62923]: DEBUG nova.scheduler.client.report [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 664.056730] env[62923]: ERROR nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6c8e867c-65d6-4982-a2d7-2c382f37782f, please check neutron logs for more information. 
[ 664.056730] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 664.056730] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 664.056730] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 664.056730] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 664.056730] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 664.056730] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 664.056730] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 664.056730] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 664.056730] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 664.056730] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 664.056730] env[62923]: ERROR nova.compute.manager raise self.value [ 664.056730] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 664.056730] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 664.056730] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 664.056730] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 664.057249] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 664.057249] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 664.057249] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6c8e867c-65d6-4982-a2d7-2c382f37782f, please check neutron logs for more information. 
[ 664.057249] env[62923]: ERROR nova.compute.manager [ 664.057249] env[62923]: Traceback (most recent call last): [ 664.057249] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 664.057249] env[62923]: listener.cb(fileno) [ 664.057249] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 664.057249] env[62923]: result = function(*args, **kwargs) [ 664.057249] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 664.057249] env[62923]: return func(*args, **kwargs) [ 664.057249] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 664.057249] env[62923]: raise e [ 664.057249] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 664.057249] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 664.057249] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 664.057249] env[62923]: created_port_ids = self._update_ports_for_instance( [ 664.057249] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 664.057249] env[62923]: with excutils.save_and_reraise_exception(): [ 664.057249] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 664.057249] env[62923]: self.force_reraise() [ 664.057249] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 664.057249] env[62923]: raise self.value [ 664.057249] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 664.057249] env[62923]: updated_port = self._update_port( [ 664.057249] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 664.057249] env[62923]: _ensure_no_port_binding_failure(port) [ 664.057249] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 664.057249] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 664.057945] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 6c8e867c-65d6-4982-a2d7-2c382f37782f, please check neutron logs for more information. [ 664.057945] env[62923]: Removing descriptor: 17 [ 664.072585] env[62923]: DEBUG nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 664.101045] env[62923]: DEBUG nova.virt.hardware [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 664.101289] env[62923]: DEBUG nova.virt.hardware [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 664.101440] env[62923]: DEBUG nova.virt.hardware [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 664.101620] env[62923]: DEBUG nova.virt.hardware [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 664.101764] env[62923]: DEBUG nova.virt.hardware [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 664.101999] env[62923]: DEBUG nova.virt.hardware [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 664.102258] env[62923]: DEBUG nova.virt.hardware [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 664.102413] env[62923]: DEBUG nova.virt.hardware [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 664.102597] env[62923]: DEBUG nova.virt.hardware [None 
req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 664.102772] env[62923]: DEBUG nova.virt.hardware [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 664.102937] env[62923]: DEBUG nova.virt.hardware [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 664.103892] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84005d21-483e-4954-a6e9-8b26c6e90b94 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.112189] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23ceffa-90ed-4563-9a2b-409c671c3a86 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.126062] env[62923]: ERROR nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6c8e867c-65d6-4982-a2d7-2c382f37782f, please check neutron logs for more information. 
[ 664.126062] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Traceback (most recent call last): [ 664.126062] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 664.126062] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] yield resources [ 664.126062] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 664.126062] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] self.driver.spawn(context, instance, image_meta, [ 664.126062] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 664.126062] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 664.126062] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 664.126062] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] vm_ref = self.build_virtual_machine(instance, [ 664.126062] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 664.126437] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] vif_infos = vmwarevif.get_vif_info(self._session, [ 664.126437] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 664.126437] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] for vif in network_info: [ 664.126437] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 664.126437] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] return self._sync_wrapper(fn, *args, **kwargs) [ 664.126437] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 664.126437] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] self.wait() [ 664.126437] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 664.126437] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] self[:] = self._gt.wait() [ 664.126437] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 664.126437] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] return self._exit_event.wait() [ 664.126437] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 664.126437] env[62923]: ERROR 
nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] current.throw(*self._exc) [ 664.126904] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 664.126904] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] result = function(*args, **kwargs) [ 664.126904] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 664.126904] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] return func(*args, **kwargs) [ 664.126904] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 664.126904] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] raise e [ 664.126904] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 664.126904] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] nwinfo = self.network_api.allocate_for_instance( [ 664.126904] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 664.126904] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] created_port_ids = self._update_ports_for_instance( [ 664.126904] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 664.126904] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] with excutils.save_and_reraise_exception(): [ 664.126904] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 664.127295] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] self.force_reraise() [ 664.127295] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 664.127295] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] raise self.value [ 664.127295] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 664.127295] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] updated_port = self._update_port( [ 664.127295] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 664.127295] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] _ensure_no_port_binding_failure(port) [ 664.127295] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
664.127295] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] raise exception.PortBindingFailed(port_id=port['id']) [ 664.127295] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] nova.exception.PortBindingFailed: Binding failed for port 6c8e867c-65d6-4982-a2d7-2c382f37782f, please check neutron logs for more information. [ 664.127295] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] [ 664.127295] env[62923]: INFO nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Terminating instance [ 664.128989] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "refresh_cache-ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.377158] env[62923]: DEBUG nova.network.neutron [req-146a4637-14d5-4829-881f-f9b1bc4ca9d6 req-275f5881-663c-4a83-8103-fab5542ace43 service nova] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.465283] env[62923]: DEBUG nova.network.neutron [req-146a4637-14d5-4829-881f-f9b1bc4ca9d6 req-275f5881-663c-4a83-8103-fab5542ace43 service nova] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.527812] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.480s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.528340] env[62923]: DEBUG nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 664.531501] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.634s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.967787] env[62923]: DEBUG oslo_concurrency.lockutils [req-146a4637-14d5-4829-881f-f9b1bc4ca9d6 req-275f5881-663c-4a83-8103-fab5542ace43 service nova] Releasing lock "refresh_cache-ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.968268] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "refresh_cache-ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.968454] env[62923]: DEBUG nova.network.neutron [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 665.036330] env[62923]: DEBUG nova.compute.utils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 665.040641] env[62923]: DEBUG nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 665.040641] env[62923]: DEBUG nova.network.neutron [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 665.079011] env[62923]: DEBUG nova.policy [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c37debff078b4389813658cbad297e65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0db41047d1004a1d9ca7f663178058da', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 665.409395] env[62923]: DEBUG nova.network.neutron [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Successfully created port: f8dae359-ce8c-428d-9d45-0b8b70f24e5f {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.473435] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-697d549d-5d93-438a-84a3-0c4db6c756ba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.481293] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92b7e19-715d-4815-baad-5f1a00249919 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.511924] env[62923]: DEBUG nova.network.neutron [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 665.514062] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b9fa62-30fd-4c52-b328-70913bc0609c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.521176] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f889b57b-35ad-4542-9b4b-5faa5073c4cc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.534089] env[62923]: DEBUG nova.compute.provider_tree [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.543326] env[62923]: DEBUG nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 665.655505] env[62923]: DEBUG nova.network.neutron [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.933320] env[62923]: DEBUG nova.compute.manager [req-44c6309c-cc79-4e98-a13d-4add6ccd81af req-88bbc933-e727-46ad-ac45-0ccd032c789d service nova] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Received event network-vif-deleted-6c8e867c-65d6-4982-a2d7-2c382f37782f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 666.037524] env[62923]: DEBUG nova.scheduler.client.report [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 666.160044] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "refresh_cache-ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.160493] env[62923]: DEBUG nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Start destroying the instance 
on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 666.160697] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 666.161018] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c92356aa-809f-450d-a3b9-03f7621a8067 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.169963] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd968fdb-b920-4053-ac12-3f9b01d8843e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.192807] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6 could not be found. [ 666.194192] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 666.194398] env[62923]: INFO nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Took 0.03 seconds to destroy the instance on the hypervisor. [ 666.194658] env[62923]: DEBUG oslo.service.loopingcall [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 666.194879] env[62923]: DEBUG nova.compute.manager [-] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 666.194972] env[62923]: DEBUG nova.network.neutron [-] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 666.219692] env[62923]: DEBUG nova.network.neutron [-] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.544376] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.013s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.545386] env[62923]: ERROR nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c4824b52-af57-4910-85a8-7621d454e216, please check neutron logs for more information. [ 666.545386] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Traceback (most recent call last): [ 666.545386] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 666.545386] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] self.driver.spawn(context, instance, image_meta, [ 666.545386] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 666.545386] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 666.545386] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 666.545386] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] vm_ref = self.build_virtual_machine(instance, [ 666.545386] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 666.545386] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] vif_infos = vmwarevif.get_vif_info(self._session, [ 666.545386] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 666.545980] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] for vif in network_info: [ 666.545980] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 666.545980] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] return self._sync_wrapper(fn, *args, **kwargs) [ 666.545980] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 666.545980] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] self.wait() [ 666.545980] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 666.545980] env[62923]: ERROR 
nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] self[:] = self._gt.wait() [ 666.545980] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 666.545980] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] return self._exit_event.wait() [ 666.545980] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 666.545980] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] result = hub.switch() [ 666.545980] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 666.545980] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] return self.greenlet.switch() [ 666.546523] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 666.546523] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] result = function(*args, **kwargs) [ 666.546523] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 666.546523] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] return func(*args, **kwargs) [ 666.546523] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 666.546523] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] raise e [ 666.546523] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.546523] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] nwinfo = self.network_api.allocate_for_instance( [ 666.546523] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 666.546523] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] created_port_ids = self._update_ports_for_instance( [ 666.546523] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 666.546523] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] with excutils.save_and_reraise_exception(): [ 666.546523] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.547225] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] self.force_reraise() [ 666.547225] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 666.547225] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] raise self.value [ 666.547225] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 666.547225] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] updated_port = self._update_port( [ 666.547225] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.547225] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] _ensure_no_port_binding_failure(port) [ 666.547225] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.547225] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] raise exception.PortBindingFailed(port_id=port['id']) [ 666.547225] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] nova.exception.PortBindingFailed: Binding failed for port c4824b52-af57-4910-85a8-7621d454e216, please check neutron logs for more information. [ 666.547225] env[62923]: ERROR nova.compute.manager [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] [ 666.547765] env[62923]: DEBUG nova.compute.utils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Binding failed for port c4824b52-af57-4910-85a8-7621d454e216, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 666.547765] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.797s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.550340] env[62923]: DEBUG nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Build of instance 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab was re-scheduled: Binding failed for port c4824b52-af57-4910-85a8-7621d454e216, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 666.550695] env[62923]: DEBUG nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 666.550922] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Acquiring lock "refresh_cache-2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.551085] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Acquired lock "refresh_cache-2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.551235] env[62923]: DEBUG nova.network.neutron [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 666.552880] env[62923]: DEBUG nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 666.578744] env[62923]: DEBUG nova.virt.hardware [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 666.578972] env[62923]: DEBUG nova.virt.hardware [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 666.579138] env[62923]: DEBUG nova.virt.hardware [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 666.579320] env[62923]: DEBUG nova.virt.hardware [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 666.579463] env[62923]: DEBUG nova.virt.hardware [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 666.579607] env[62923]: DEBUG nova.virt.hardware [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 666.579802] env[62923]: DEBUG nova.virt.hardware [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 666.580034] env[62923]: DEBUG nova.virt.hardware [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 666.580238] env[62923]: DEBUG 
nova.virt.hardware [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 666.580404] env[62923]: DEBUG nova.virt.hardware [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 666.580571] env[62923]: DEBUG nova.virt.hardware [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 666.581427] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f052e38e-6f02-4cab-a289-7040b70fb927 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.590100] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc29e416-9f06-4caf-ac73-54c9e3281caa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.620311] env[62923]: ERROR nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f8dae359-ce8c-428d-9d45-0b8b70f24e5f, please check neutron logs for more information. 
[ 666.620311] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 666.620311] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.620311] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 666.620311] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 666.620311] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 666.620311] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 666.620311] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 666.620311] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.620311] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 666.620311] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.620311] env[62923]: ERROR nova.compute.manager raise self.value [ 666.620311] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 666.620311] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 666.620311] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.620311] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 666.620806] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.620806] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 666.620806] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f8dae359-ce8c-428d-9d45-0b8b70f24e5f, please check neutron logs for more information. 
[ 666.620806] env[62923]: ERROR nova.compute.manager [ 666.620806] env[62923]: Traceback (most recent call last): [ 666.620806] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 666.620806] env[62923]: listener.cb(fileno) [ 666.620806] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 666.620806] env[62923]: result = function(*args, **kwargs) [ 666.620806] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 666.620806] env[62923]: return func(*args, **kwargs) [ 666.620806] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 666.620806] env[62923]: raise e [ 666.620806] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.620806] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 666.620806] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 666.620806] env[62923]: created_port_ids = self._update_ports_for_instance( [ 666.620806] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 666.620806] env[62923]: with excutils.save_and_reraise_exception(): [ 666.620806] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.620806] env[62923]: self.force_reraise() [ 666.620806] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.620806] env[62923]: raise self.value [ 666.620806] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 666.620806] env[62923]: updated_port = self._update_port( [ 666.620806] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.620806] env[62923]: _ensure_no_port_binding_failure(port) [ 666.620806] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.620806] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 666.621646] env[62923]: nova.exception.PortBindingFailed: Binding failed for port f8dae359-ce8c-428d-9d45-0b8b70f24e5f, please check neutron logs for more information. [ 666.621646] env[62923]: Removing descriptor: 17 [ 666.621646] env[62923]: ERROR nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f8dae359-ce8c-428d-9d45-0b8b70f24e5f, please check neutron logs for more information. 
[ 666.621646] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Traceback (most recent call last): [ 666.621646] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 666.621646] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] yield resources [ 666.621646] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 666.621646] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] self.driver.spawn(context, instance, image_meta, [ 666.621646] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 666.621646] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] self._vmops.spawn(context, instance, image_meta, injected_files, [ 666.621646] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 666.621646] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] vm_ref = self.build_virtual_machine(instance, [ 666.622050] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 666.622050] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] vif_infos = vmwarevif.get_vif_info(self._session, [ 666.622050] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 666.622050] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] for vif in network_info: [ 666.622050] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 666.622050] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] return self._sync_wrapper(fn, *args, **kwargs) [ 666.622050] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 666.622050] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] self.wait() [ 666.622050] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 666.622050] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] self[:] = self._gt.wait() [ 666.622050] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 666.622050] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] return self._exit_event.wait() [ 666.622050] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 666.622404] env[62923]: ERROR 
nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] result = hub.switch() [ 666.622404] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 666.622404] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] return self.greenlet.switch() [ 666.622404] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 666.622404] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] result = function(*args, **kwargs) [ 666.622404] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 666.622404] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] return func(*args, **kwargs) [ 666.622404] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 666.622404] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] raise e [ 666.622404] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.622404] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] nwinfo = self.network_api.allocate_for_instance( [ 666.622404] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 666.622404] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] created_port_ids = self._update_ports_for_instance( [ 666.622742] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 666.622742] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] with excutils.save_and_reraise_exception(): [ 666.622742] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.622742] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] self.force_reraise() [ 666.622742] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.622742] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] raise self.value [ 666.622742] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 666.622742] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] updated_port = self._update_port( [ 666.622742] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.622742] 
env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] _ensure_no_port_binding_failure(port) [ 666.622742] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.622742] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] raise exception.PortBindingFailed(port_id=port['id']) [ 666.623109] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] nova.exception.PortBindingFailed: Binding failed for port f8dae359-ce8c-428d-9d45-0b8b70f24e5f, please check neutron logs for more information. [ 666.623109] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] [ 666.623109] env[62923]: INFO nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Terminating instance [ 666.623201] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "refresh_cache-7ab06c90-5d19-43fa-b91b-7d17f85d3258" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.623331] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "refresh_cache-7ab06c90-5d19-43fa-b91b-7d17f85d3258" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.623516] env[62923]: DEBUG nova.network.neutron [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 666.722055] env[62923]: DEBUG nova.network.neutron [-] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.084612] env[62923]: DEBUG nova.network.neutron [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.165707] env[62923]: DEBUG nova.network.neutron [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.224539] env[62923]: INFO nova.compute.manager [-] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Took 1.03 seconds to deallocate network for instance. 
[ 667.228885] env[62923]: DEBUG nova.compute.claims [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 667.228885] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.275561] env[62923]: DEBUG nova.network.neutron [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.305494] env[62923]: DEBUG nova.network.neutron [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.422554] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf353b4f-3f13-463c-99e5-0f58f3c2411c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.429941] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a95356-680d-4cce-a7ec-0ddcae106998 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.459043] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ed0bac-3516-4a57-ac0c-0ce529a0eeff {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.467418] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de76149-71ec-4ca7-a3dc-a0d0b339f7f1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.482172] env[62923]: DEBUG nova.compute.provider_tree [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.778239] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Releasing lock "refresh_cache-2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.778610] env[62923]: DEBUG nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 
tempest-ServersNegativeTestJSON-88441782-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 667.778885] env[62923]: DEBUG nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 667.779118] env[62923]: DEBUG nova.network.neutron [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 667.795071] env[62923]: DEBUG nova.network.neutron [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.808888] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "refresh_cache-7ab06c90-5d19-43fa-b91b-7d17f85d3258" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.809261] env[62923]: DEBUG nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 667.809439] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 667.809700] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-020f0798-ec19-41c6-a88c-257e296b87c0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.818411] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f263c90e-0c0e-4f95-90b0-6126fd1fd216 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.838982] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7ab06c90-5d19-43fa-b91b-7d17f85d3258 could not be found. 
[ 667.839207] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 667.839396] env[62923]: INFO nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Took 0.03 seconds to destroy the instance on the hypervisor. [ 667.839625] env[62923]: DEBUG oslo.service.loopingcall [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 667.839832] env[62923]: DEBUG nova.compute.manager [-] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 667.839920] env[62923]: DEBUG nova.network.neutron [-] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 667.853308] env[62923]: DEBUG nova.network.neutron [-] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.969087] env[62923]: DEBUG nova.compute.manager [req-dd6b5c6d-ff3a-4a36-970d-64e30d822cc1 req-79c5ec14-f03a-41ab-8ebc-b543a9f89de6 service nova] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Received event network-changed-f8dae359-ce8c-428d-9d45-0b8b70f24e5f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 667.969278] env[62923]: DEBUG nova.compute.manager [req-dd6b5c6d-ff3a-4a36-970d-64e30d822cc1 req-79c5ec14-f03a-41ab-8ebc-b543a9f89de6 service nova] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Refreshing instance network info cache due to event network-changed-f8dae359-ce8c-428d-9d45-0b8b70f24e5f. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 667.969481] env[62923]: DEBUG oslo_concurrency.lockutils [req-dd6b5c6d-ff3a-4a36-970d-64e30d822cc1 req-79c5ec14-f03a-41ab-8ebc-b543a9f89de6 service nova] Acquiring lock "refresh_cache-7ab06c90-5d19-43fa-b91b-7d17f85d3258" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.969617] env[62923]: DEBUG oslo_concurrency.lockutils [req-dd6b5c6d-ff3a-4a36-970d-64e30d822cc1 req-79c5ec14-f03a-41ab-8ebc-b543a9f89de6 service nova] Acquired lock "refresh_cache-7ab06c90-5d19-43fa-b91b-7d17f85d3258" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.969774] env[62923]: DEBUG nova.network.neutron [req-dd6b5c6d-ff3a-4a36-970d-64e30d822cc1 req-79c5ec14-f03a-41ab-8ebc-b543a9f89de6 service nova] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Refreshing network info cache for port f8dae359-ce8c-428d-9d45-0b8b70f24e5f {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 667.985116] env[62923]: DEBUG nova.scheduler.client.report [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 668.297368] env[62923]: DEBUG nova.network.neutron [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.355179] env[62923]: DEBUG nova.network.neutron [-] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.486193] env[62923]: DEBUG nova.network.neutron [req-dd6b5c6d-ff3a-4a36-970d-64e30d822cc1 req-79c5ec14-f03a-41ab-8ebc-b543a9f89de6 service nova] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 668.490061] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.943s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.490395] env[62923]: ERROR nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port eee5f468-9531-469a-8dbe-eed2faf79c66, please check neutron logs for more information. [ 668.490395] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Traceback (most recent call last): [ 668.490395] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 668.490395] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] self.driver.spawn(context, instance, image_meta, [ 668.490395] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 668.490395] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] self._vmops.spawn(context, instance, image_meta, injected_files, [ 668.490395] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 668.490395] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] vm_ref = self.build_virtual_machine(instance, [ 668.490395] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 668.490395] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] vif_infos = vmwarevif.get_vif_info(self._session, [ 668.490395] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 668.490688] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] for vif in network_info: [ 668.490688] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 668.490688] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] return self._sync_wrapper(fn, *args, **kwargs) [ 668.490688] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 668.490688] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] self.wait() [ 668.490688] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 668.490688] env[62923]: ERROR nova.compute.manager [instance: 
f106b311-fc2f-4811-b7e0-d680de236b78] self[:] = self._gt.wait() [ 668.490688] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 668.490688] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] return self._exit_event.wait() [ 668.490688] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 668.490688] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] result = hub.switch() [ 668.490688] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 668.490688] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] return self.greenlet.switch() [ 668.490978] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 668.490978] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] result = function(*args, **kwargs) [ 668.490978] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 668.490978] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] return func(*args, **kwargs) [ 668.490978] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 668.490978] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] raise e [ 668.490978] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 668.490978] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] nwinfo = self.network_api.allocate_for_instance( [ 668.490978] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 668.490978] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] created_port_ids = self._update_ports_for_instance( [ 668.490978] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 668.490978] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] with excutils.save_and_reraise_exception(): [ 668.490978] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 668.491292] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] self.force_reraise() [ 668.491292] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 
668.491292] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] raise self.value [ 668.491292] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 668.491292] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] updated_port = self._update_port( [ 668.491292] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 668.491292] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] _ensure_no_port_binding_failure(port) [ 668.491292] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 668.491292] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] raise exception.PortBindingFailed(port_id=port['id']) [ 668.491292] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] nova.exception.PortBindingFailed: Binding failed for port eee5f468-9531-469a-8dbe-eed2faf79c66, please check neutron logs for more information. [ 668.491292] env[62923]: ERROR nova.compute.manager [instance: f106b311-fc2f-4811-b7e0-d680de236b78] [ 668.491560] env[62923]: DEBUG nova.compute.utils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Binding failed for port eee5f468-9531-469a-8dbe-eed2faf79c66, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 668.492727] env[62923]: DEBUG nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Build of instance f106b311-fc2f-4811-b7e0-d680de236b78 was re-scheduled: Binding failed for port eee5f468-9531-469a-8dbe-eed2faf79c66, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 668.494053] env[62923]: DEBUG nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 668.494053] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Acquiring lock "refresh_cache-f106b311-fc2f-4811-b7e0-d680de236b78" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.494053] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Acquired lock "refresh_cache-f106b311-fc2f-4811-b7e0-d680de236b78" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.494053] env[62923]: DEBUG nova.network.neutron [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 668.494582] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.768s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.495983] env[62923]: INFO nova.compute.claims [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 668.576140] env[62923]: DEBUG nova.network.neutron [req-dd6b5c6d-ff3a-4a36-970d-64e30d822cc1 req-79c5ec14-f03a-41ab-8ebc-b543a9f89de6 service nova] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.799767] env[62923]: INFO nova.compute.manager [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] [instance: 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab] Took 1.02 seconds to deallocate network for instance. [ 668.858376] env[62923]: INFO nova.compute.manager [-] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Took 1.02 seconds to deallocate network for instance. 
[ 668.860531] env[62923]: DEBUG nova.compute.claims [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 668.860714] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.023719] env[62923]: DEBUG nova.network.neutron [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.078548] env[62923]: DEBUG oslo_concurrency.lockutils [req-dd6b5c6d-ff3a-4a36-970d-64e30d822cc1 req-79c5ec14-f03a-41ab-8ebc-b543a9f89de6 service nova] Releasing lock "refresh_cache-7ab06c90-5d19-43fa-b91b-7d17f85d3258" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.078548] env[62923]: DEBUG nova.compute.manager [req-dd6b5c6d-ff3a-4a36-970d-64e30d822cc1 req-79c5ec14-f03a-41ab-8ebc-b543a9f89de6 service nova] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Received event network-vif-deleted-f8dae359-ce8c-428d-9d45-0b8b70f24e5f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 669.099124] env[62923]: DEBUG nova.network.neutron [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.601766] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Releasing lock "refresh_cache-f106b311-fc2f-4811-b7e0-d680de236b78" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.602206] env[62923]: DEBUG nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 669.602206] env[62923]: DEBUG nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 669.602336] env[62923]: DEBUG nova.network.neutron [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 669.631917] env[62923]: DEBUG nova.network.neutron [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 669.826659] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db551018-7670-4e05-a788-fe7b4bcea558 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.834615] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f786b1ec-853a-4c7e-a68b-8997728fc038 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.838882] env[62923]: INFO nova.scheduler.client.report [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Deleted allocations for instance 2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab [ 669.872012] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46875304-38ea-433e-b392-7c05de140bed {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.879785] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eee8b22-e229-461c-a26e-437cf7cf419c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.892947] env[62923]: DEBUG nova.compute.provider_tree [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.138112] env[62923]: DEBUG nova.network.neutron [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.347868] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7d8f7ebd-a33a-46ae-8eb6-9825d699c333 tempest-ServersNegativeTestJSON-88441782 tempest-ServersNegativeTestJSON-88441782-project-member] Lock "2e1217a9-e3b5-4f8c-af4e-3d5da315b3ab" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.500s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.396530] env[62923]: DEBUG nova.scheduler.client.report [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 670.641195] env[62923]: INFO nova.compute.manager [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] [instance: f106b311-fc2f-4811-b7e0-d680de236b78] Took 1.04 seconds to deallocate network for instance. [ 670.849742] env[62923]: DEBUG nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 670.901559] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.902096] env[62923]: DEBUG nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 670.904796] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.203s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.906388] env[62923]: INFO nova.compute.claims [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 671.378445] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.414030] env[62923]: DEBUG nova.compute.utils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 671.417661] env[62923]: DEBUG nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 671.417661] env[62923]: DEBUG nova.network.neutron [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 671.474781] env[62923]: DEBUG nova.policy [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd02dc5d31a68497c880095c8dfe907f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5dce93a611f490baf6fcdbcd9416e36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 671.674804] env[62923]: INFO nova.scheduler.client.report [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Deleted allocations for instance f106b311-fc2f-4811-b7e0-d680de236b78 [ 671.919629] env[62923]: DEBUG nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 672.038967] env[62923]: DEBUG nova.network.neutron [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Successfully created port: b73a1f92-867c-4fcc-8ca9-271006fb7769 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 672.188088] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d5b93fe7-e302-4704-bb9e-957aa69502c5 tempest-ServersTestJSON-1964179335 tempest-ServersTestJSON-1964179335-project-member] Lock "f106b311-fc2f-4811-b7e0-d680de236b78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 98.319s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.344842] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0421cbb-8259-4abe-8260-a731235a3878 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.352626] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c202c899-a8d2-42c6-b768-52445b526045 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.391844] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583b6ee5-7664-406f-bdf2-63cbd03fbdd3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
672.402142] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7534cecf-dcfc-455c-b644-50c1085e3686 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.412888] env[62923]: DEBUG nova.compute.provider_tree [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.689835] env[62923]: DEBUG nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 672.917043] env[62923]: DEBUG nova.scheduler.client.report [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 672.928277] env[62923]: DEBUG nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 672.962854] env[62923]: DEBUG nova.virt.hardware [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 672.963122] env[62923]: DEBUG nova.virt.hardware [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 672.963280] env[62923]: DEBUG nova.virt.hardware [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.963485] env[62923]: DEBUG nova.virt.hardware [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 672.963630] env[62923]: DEBUG nova.virt.hardware [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.963781] env[62923]: DEBUG nova.virt.hardware [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 672.963981] env[62923]: DEBUG nova.virt.hardware [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 672.967315] env[62923]: DEBUG nova.virt.hardware [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 672.967315] env[62923]: DEBUG nova.virt.hardware [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 672.967315] env[62923]: DEBUG nova.virt.hardware [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 672.967315] env[62923]: DEBUG nova.virt.hardware [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 672.967315] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a58c864-2de2-49c3-a3b6-8bce6273d097 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.975659] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a40350-12a5-45e7-ac8b-6e9d6b226a4e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.042526] env[62923]: DEBUG nova.compute.manager [req-bc48a2b9-bea6-42bb-bee4-90a6cdca5796 req-0a03aee6-41e5-40b9-842f-aa931ece00cb service nova] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Received event network-changed-b73a1f92-867c-4fcc-8ca9-271006fb7769 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 673.042741] env[62923]: DEBUG nova.compute.manager [req-bc48a2b9-bea6-42bb-bee4-90a6cdca5796 req-0a03aee6-41e5-40b9-842f-aa931ece00cb service nova] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Refreshing instance network info cache due to event network-changed-b73a1f92-867c-4fcc-8ca9-271006fb7769. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 673.042963] env[62923]: DEBUG oslo_concurrency.lockutils [req-bc48a2b9-bea6-42bb-bee4-90a6cdca5796 req-0a03aee6-41e5-40b9-842f-aa931ece00cb service nova] Acquiring lock "refresh_cache-7559e4b7-1cfe-438e-8a14-a964c1a76d52" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.043120] env[62923]: DEBUG oslo_concurrency.lockutils [req-bc48a2b9-bea6-42bb-bee4-90a6cdca5796 req-0a03aee6-41e5-40b9-842f-aa931ece00cb service nova] Acquired lock "refresh_cache-7559e4b7-1cfe-438e-8a14-a964c1a76d52" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.043279] env[62923]: DEBUG nova.network.neutron [req-bc48a2b9-bea6-42bb-bee4-90a6cdca5796 req-0a03aee6-41e5-40b9-842f-aa931ece00cb service nova] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Refreshing network info cache for port b73a1f92-867c-4fcc-8ca9-271006fb7769 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 673.219021] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.275464] env[62923]: ERROR nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b73a1f92-867c-4fcc-8ca9-271006fb7769, please check neutron logs for more information. 
[ 673.275464] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 673.275464] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 673.275464] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 673.275464] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 673.275464] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 673.275464] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 673.275464] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 673.275464] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 673.275464] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 673.275464] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 673.275464] env[62923]: ERROR nova.compute.manager raise self.value [ 673.275464] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 673.275464] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 673.275464] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 673.275464] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 673.275896] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 673.275896] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 673.275896] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b73a1f92-867c-4fcc-8ca9-271006fb7769, please check neutron logs for more information. 
[ 673.275896] env[62923]: ERROR nova.compute.manager [ 673.275896] env[62923]: Traceback (most recent call last): [ 673.275896] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 673.275896] env[62923]: listener.cb(fileno) [ 673.275896] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 673.275896] env[62923]: result = function(*args, **kwargs) [ 673.275896] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 673.275896] env[62923]: return func(*args, **kwargs) [ 673.275896] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 673.275896] env[62923]: raise e [ 673.275896] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 673.275896] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 673.275896] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 673.275896] env[62923]: created_port_ids = self._update_ports_for_instance( [ 673.275896] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 673.275896] env[62923]: with excutils.save_and_reraise_exception(): [ 673.275896] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 673.275896] env[62923]: self.force_reraise() [ 673.275896] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 673.275896] env[62923]: raise self.value [ 673.275896] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 673.275896] env[62923]: updated_port = self._update_port( [ 673.275896] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 673.275896] env[62923]: _ensure_no_port_binding_failure(port) [ 673.275896] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 673.275896] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 673.276580] env[62923]: nova.exception.PortBindingFailed: Binding failed for port b73a1f92-867c-4fcc-8ca9-271006fb7769, please check neutron logs for more information. [ 673.276580] env[62923]: Removing descriptor: 21 [ 673.276580] env[62923]: ERROR nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b73a1f92-867c-4fcc-8ca9-271006fb7769, please check neutron logs for more information. 
[ 673.276580] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Traceback (most recent call last): [ 673.276580] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 673.276580] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] yield resources [ 673.276580] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 673.276580] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] self.driver.spawn(context, instance, image_meta, [ 673.276580] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 673.276580] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] self._vmops.spawn(context, instance, image_meta, injected_files, [ 673.276580] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 673.276580] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] vm_ref = self.build_virtual_machine(instance, [ 673.277097] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 673.277097] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] vif_infos = vmwarevif.get_vif_info(self._session, [ 673.277097] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 673.277097] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] for vif in network_info: [ 673.277097] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 673.277097] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] return self._sync_wrapper(fn, *args, **kwargs) [ 673.277097] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 673.277097] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] self.wait() [ 673.277097] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 673.277097] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] self[:] = self._gt.wait() [ 673.277097] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 673.277097] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] return self._exit_event.wait() [ 673.277097] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 673.277425] env[62923]: ERROR 
nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] result = hub.switch() [ 673.277425] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 673.277425] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] return self.greenlet.switch() [ 673.277425] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 673.277425] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] result = function(*args, **kwargs) [ 673.277425] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 673.277425] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] return func(*args, **kwargs) [ 673.277425] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 673.277425] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] raise e [ 673.277425] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 673.277425] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] nwinfo = self.network_api.allocate_for_instance( [ 673.277425] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 673.277425] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] created_port_ids = self._update_ports_for_instance( [ 673.277730] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 673.277730] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] with excutils.save_and_reraise_exception(): [ 673.277730] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 673.277730] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] self.force_reraise() [ 673.277730] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 673.277730] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] raise self.value [ 673.277730] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 673.277730] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] updated_port = self._update_port( [ 673.277730] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 673.277730] 
env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] _ensure_no_port_binding_failure(port) [ 673.277730] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 673.277730] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] raise exception.PortBindingFailed(port_id=port['id']) [ 673.278394] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] nova.exception.PortBindingFailed: Binding failed for port b73a1f92-867c-4fcc-8ca9-271006fb7769, please check neutron logs for more information. [ 673.278394] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] [ 673.278394] env[62923]: INFO nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Terminating instance [ 673.278695] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquiring lock "refresh_cache-7559e4b7-1cfe-438e-8a14-a964c1a76d52" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.422577] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.518s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.423103] env[62923]: DEBUG nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 673.429018] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.512s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.429018] env[62923]: INFO nova.compute.claims [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 673.568102] env[62923]: DEBUG nova.network.neutron [req-bc48a2b9-bea6-42bb-bee4-90a6cdca5796 req-0a03aee6-41e5-40b9-842f-aa931ece00cb service nova] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.682565] env[62923]: DEBUG nova.network.neutron [req-bc48a2b9-bea6-42bb-bee4-90a6cdca5796 req-0a03aee6-41e5-40b9-842f-aa931ece00cb service nova] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.934099] env[62923]: DEBUG nova.compute.utils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 673.935980] env[62923]: DEBUG nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 673.936289] env[62923]: DEBUG nova.network.neutron [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 673.998216] env[62923]: DEBUG nova.policy [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd02dc5d31a68497c880095c8dfe907f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5dce93a611f490baf6fcdbcd9416e36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 674.186408] env[62923]: DEBUG oslo_concurrency.lockutils [req-bc48a2b9-bea6-42bb-bee4-90a6cdca5796 req-0a03aee6-41e5-40b9-842f-aa931ece00cb service nova] Releasing lock "refresh_cache-7559e4b7-1cfe-438e-8a14-a964c1a76d52" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.186838] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquired lock "refresh_cache-7559e4b7-1cfe-438e-8a14-a964c1a76d52" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.187082] env[62923]: DEBUG nova.network.neutron [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 674.309547] env[62923]: DEBUG nova.network.neutron [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 
tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Successfully created port: 3c73ae97-c9b7-4453-be1f-a7271494dd19 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 674.436698] env[62923]: DEBUG nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 674.716932] env[62923]: DEBUG nova.network.neutron [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 674.879771] env[62923]: DEBUG nova.network.neutron [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.919046] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48687328-a217-4e71-8aa8-e6ffb8c817e0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.928296] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a0b647-b34e-4702-8330-05f4a184fb67 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.971705] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f151fb-1cdd-4556-b4c5-a8ec90f653a5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.980450] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951a159f-c2dd-4cfa-a61e-279b73498cf7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.995083] env[62923]: DEBUG nova.compute.provider_tree [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.141914] env[62923]: DEBUG nova.compute.manager [req-726255cf-a8a4-4c0f-8003-353c85450451 req-27623f5a-0e36-4d57-969b-d89210195072 service nova] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Received event network-vif-deleted-b73a1f92-867c-4fcc-8ca9-271006fb7769 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 675.384144] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Releasing lock "refresh_cache-7559e4b7-1cfe-438e-8a14-a964c1a76d52" {{(pid=62923) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.384583] env[62923]: DEBUG nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 675.384769] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 675.385091] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d0a85883-0e87-4ca4-a5a9-ee834fd92c2e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.393657] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e78fdf-ea4c-424a-9d0a-8b165ff0fb42 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.414648] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7559e4b7-1cfe-438e-8a14-a964c1a76d52 could not be found. [ 675.414859] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 675.415047] env[62923]: INFO nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Took 0.03 seconds to destroy the instance on the hypervisor. [ 675.415286] env[62923]: DEBUG oslo.service.loopingcall [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 675.415499] env[62923]: DEBUG nova.compute.manager [-] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 675.415608] env[62923]: DEBUG nova.network.neutron [-] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 675.446106] env[62923]: DEBUG nova.network.neutron [-] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 675.472700] env[62923]: DEBUG nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 675.493569] env[62923]: DEBUG nova.virt.hardware [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 675.493816] env[62923]: DEBUG nova.virt.hardware [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 675.493967] env[62923]: DEBUG nova.virt.hardware [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 675.494156] env[62923]: DEBUG nova.virt.hardware [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 675.494295] env[62923]: DEBUG nova.virt.hardware [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 675.494521] env[62923]: DEBUG nova.virt.hardware [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 675.494757] env[62923]: DEBUG nova.virt.hardware [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 675.494912] env[62923]: DEBUG nova.virt.hardware [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 675.495082] env[62923]: DEBUG nova.virt.hardware [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 675.495240] env[62923]: DEBUG nova.virt.hardware [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 675.495403] env[62923]: DEBUG nova.virt.hardware [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 675.496259] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1fcc09f-e810-4d6e-b569-df93393d545d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.499340] env[62923]: DEBUG nova.scheduler.client.report [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 675.507609] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bae8a6d-fb9b-45c1-8bd9-c022e351f1ab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.574107] env[62923]: ERROR nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3c73ae97-c9b7-4453-be1f-a7271494dd19, please check neutron logs for more information. 
[ 675.574107] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 675.574107] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 675.574107] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 675.574107] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 675.574107] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 675.574107] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 675.574107] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 675.574107] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 675.574107] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 675.574107] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 675.574107] env[62923]: ERROR nova.compute.manager raise self.value [ 675.574107] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 675.574107] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 675.574107] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 675.574107] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 675.574653] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 675.574653] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 675.574653] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3c73ae97-c9b7-4453-be1f-a7271494dd19, please check neutron logs for more information. 
[ 675.574653] env[62923]: ERROR nova.compute.manager [ 675.574653] env[62923]: Traceback (most recent call last): [ 675.574653] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 675.574653] env[62923]: listener.cb(fileno) [ 675.574653] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 675.574653] env[62923]: result = function(*args, **kwargs) [ 675.574653] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 675.574653] env[62923]: return func(*args, **kwargs) [ 675.574653] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 675.574653] env[62923]: raise e [ 675.574653] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 675.574653] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 675.574653] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 675.574653] env[62923]: created_port_ids = self._update_ports_for_instance( [ 675.574653] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 675.574653] env[62923]: with excutils.save_and_reraise_exception(): [ 675.574653] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 675.574653] env[62923]: self.force_reraise() [ 675.574653] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 675.574653] env[62923]: raise self.value [ 675.574653] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 675.574653] env[62923]: updated_port = self._update_port( [ 675.574653] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 675.574653] env[62923]: _ensure_no_port_binding_failure(port) [ 675.574653] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 675.574653] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 675.575461] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 3c73ae97-c9b7-4453-be1f-a7271494dd19, please check neutron logs for more information. [ 675.575461] env[62923]: Removing descriptor: 21 [ 675.575461] env[62923]: ERROR nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3c73ae97-c9b7-4453-be1f-a7271494dd19, please check neutron logs for more information. 
[ 675.575461] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Traceback (most recent call last): [ 675.575461] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 675.575461] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] yield resources [ 675.575461] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 675.575461] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] self.driver.spawn(context, instance, image_meta, [ 675.575461] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 675.575461] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 675.575461] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 675.575461] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] vm_ref = self.build_virtual_machine(instance, [ 675.575837] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 675.575837] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] vif_infos = vmwarevif.get_vif_info(self._session, [ 675.575837] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 675.575837] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] for vif in network_info: [ 675.575837] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 675.575837] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] return self._sync_wrapper(fn, *args, **kwargs) [ 675.575837] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 675.575837] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] self.wait() [ 675.575837] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 675.575837] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] self[:] = self._gt.wait() [ 675.575837] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 675.575837] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] return self._exit_event.wait() [ 675.575837] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 675.576235] env[62923]: ERROR 
nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] result = hub.switch() [ 675.576235] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 675.576235] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] return self.greenlet.switch() [ 675.576235] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 675.576235] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] result = function(*args, **kwargs) [ 675.576235] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 675.576235] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] return func(*args, **kwargs) [ 675.576235] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 675.576235] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] raise e [ 675.576235] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 675.576235] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] nwinfo = self.network_api.allocate_for_instance( [ 675.576235] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 675.576235] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] created_port_ids = self._update_ports_for_instance( [ 675.576565] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 675.576565] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] with excutils.save_and_reraise_exception(): [ 675.576565] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 675.576565] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] self.force_reraise() [ 675.576565] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 675.576565] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] raise self.value [ 675.576565] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 675.576565] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] updated_port = self._update_port( [ 675.576565] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 675.576565] 
env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] _ensure_no_port_binding_failure(port) [ 675.576565] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 675.576565] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] raise exception.PortBindingFailed(port_id=port['id']) [ 675.576874] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] nova.exception.PortBindingFailed: Binding failed for port 3c73ae97-c9b7-4453-be1f-a7271494dd19, please check neutron logs for more information. [ 675.576874] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] [ 675.576874] env[62923]: INFO nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Terminating instance [ 675.577986] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquiring lock "refresh_cache-9654e9d5-a809-4875-99bb-fd99d7a7fbd6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.578101] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquired lock "refresh_cache-9654e9d5-a809-4875-99bb-fd99d7a7fbd6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.578272] env[62923]: DEBUG nova.network.neutron [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 675.832692] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquiring lock "7c98c50a-e7c7-4430-b5c6-dec88a78c397" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.832966] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Lock "7c98c50a-e7c7-4430-b5c6-dec88a78c397" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.948784] env[62923]: DEBUG nova.network.neutron [-] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.003698] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b 
tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.578s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.004288] env[62923]: DEBUG nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 676.006909] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.370s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.100440] env[62923]: DEBUG nova.network.neutron [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 676.192478] env[62923]: DEBUG nova.network.neutron [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.451291] env[62923]: INFO nova.compute.manager [-] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Took 1.04 seconds to deallocate network for instance. [ 676.453705] env[62923]: DEBUG nova.compute.claims [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 676.453882] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.511787] env[62923]: DEBUG nova.compute.utils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 676.516135] env[62923]: DEBUG nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 676.516307] env[62923]: DEBUG nova.network.neutron [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 676.573020] env[62923]: DEBUG nova.policy [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1d0006f37a245429e9a2d890d9ed438', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e4e0c7bf0f94416bd94174df3d0c3f8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 676.694887] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Releasing lock "refresh_cache-9654e9d5-a809-4875-99bb-fd99d7a7fbd6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.695371] env[62923]: DEBUG nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 676.695557] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 676.695838] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7cfeb9dd-ecff-4c41-9d21-17e14fbc35d3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.711154] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2492d7e2-4a24-4426-9dc8-98631a1aa68b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.735495] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9654e9d5-a809-4875-99bb-fd99d7a7fbd6 could not be found. 
[ 676.735749] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 676.735937] env[62923]: INFO nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 676.736189] env[62923]: DEBUG oslo.service.loopingcall [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 676.738492] env[62923]: DEBUG nova.compute.manager [-] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 676.738593] env[62923]: DEBUG nova.network.neutron [-] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 676.772431] env[62923]: DEBUG nova.network.neutron [-] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 676.896453] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b08928-0b2e-4515-8a5d-a3358e2837cd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.905025] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4f7191-fbc3-4cdf-b68d-7b517845ff58 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.936276] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a85ce4-55da-4042-b805-e2ad99d07f21 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.943388] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1490f355-8ed5-406f-b5a1-9fb22d82f01f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.957616] env[62923]: DEBUG nova.compute.provider_tree [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.959443] env[62923]: DEBUG nova.network.neutron [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Successfully created port: 
91c4c07c-bd13-4a45-8864-147fe772f374 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.017154] env[62923]: DEBUG nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 677.180706] env[62923]: DEBUG nova.compute.manager [req-926ea087-317f-4e48-8da5-aaf7c7874b63 req-efef9210-0ec4-4d13-a3c7-5cd865ab970f service nova] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Received event network-changed-3c73ae97-c9b7-4453-be1f-a7271494dd19 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 677.180884] env[62923]: DEBUG nova.compute.manager [req-926ea087-317f-4e48-8da5-aaf7c7874b63 req-efef9210-0ec4-4d13-a3c7-5cd865ab970f service nova] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Refreshing instance network info cache due to event network-changed-3c73ae97-c9b7-4453-be1f-a7271494dd19. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 677.181166] env[62923]: DEBUG oslo_concurrency.lockutils [req-926ea087-317f-4e48-8da5-aaf7c7874b63 req-efef9210-0ec4-4d13-a3c7-5cd865ab970f service nova] Acquiring lock "refresh_cache-9654e9d5-a809-4875-99bb-fd99d7a7fbd6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.181266] env[62923]: DEBUG oslo_concurrency.lockutils [req-926ea087-317f-4e48-8da5-aaf7c7874b63 req-efef9210-0ec4-4d13-a3c7-5cd865ab970f service nova] Acquired lock "refresh_cache-9654e9d5-a809-4875-99bb-fd99d7a7fbd6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.181454] env[62923]: DEBUG nova.network.neutron [req-926ea087-317f-4e48-8da5-aaf7c7874b63 req-efef9210-0ec4-4d13-a3c7-5cd865ab970f service nova] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Refreshing network info cache for port 3c73ae97-c9b7-4453-be1f-a7271494dd19 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 677.274385] env[62923]: DEBUG nova.network.neutron [-] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.463195] env[62923]: DEBUG nova.scheduler.client.report [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 677.719109] env[62923]: DEBUG nova.network.neutron [req-926ea087-317f-4e48-8da5-aaf7c7874b63 req-efef9210-0ec4-4d13-a3c7-5cd865ab970f service nova] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 677.778986] env[62923]: INFO nova.compute.manager [-] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Took 1.04 seconds to deallocate network for instance. [ 677.779382] env[62923]: DEBUG nova.compute.claims [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 677.779513] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.856410] env[62923]: DEBUG nova.network.neutron [req-926ea087-317f-4e48-8da5-aaf7c7874b63 req-efef9210-0ec4-4d13-a3c7-5cd865ab970f service nova] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.968511] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.962s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.969226] env[62923]: ERROR nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f8d891a8-bd8d-4886-8e99-8c67585a6ab3, please check neutron logs for more information. 
[ 677.969226] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Traceback (most recent call last): [ 677.969226] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 677.969226] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] self.driver.spawn(context, instance, image_meta, [ 677.969226] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 677.969226] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] self._vmops.spawn(context, instance, image_meta, injected_files, [ 677.969226] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 677.969226] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] vm_ref = self.build_virtual_machine(instance, [ 677.969226] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 677.969226] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] vif_infos = vmwarevif.get_vif_info(self._session, [ 677.969226] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 677.969743] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] for vif in network_info: [ 677.969743] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 677.969743] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] return self._sync_wrapper(fn, *args, **kwargs) [ 677.969743] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 677.969743] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] self.wait() [ 677.969743] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 677.969743] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] self[:] = self._gt.wait() [ 677.969743] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 677.969743] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] return self._exit_event.wait() [ 677.969743] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 677.969743] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] result = hub.switch() [ 677.969743] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
677.969743] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] return self.greenlet.switch() [ 677.970285] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 677.970285] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] result = function(*args, **kwargs) [ 677.970285] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 677.970285] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] return func(*args, **kwargs) [ 677.970285] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 677.970285] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] raise e [ 677.970285] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 677.970285] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] nwinfo = self.network_api.allocate_for_instance( [ 677.970285] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 677.970285] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] created_port_ids = self._update_ports_for_instance( [ 677.970285] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 677.970285] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] with excutils.save_and_reraise_exception(): [ 677.970285] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 677.970813] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] self.force_reraise() [ 677.970813] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 677.970813] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] raise self.value [ 677.970813] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 677.970813] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] updated_port = self._update_port( [ 677.970813] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 677.970813] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] _ensure_no_port_binding_failure(port) [ 677.970813] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 677.970813] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] raise exception.PortBindingFailed(port_id=port['id']) [ 677.970813] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] nova.exception.PortBindingFailed: Binding failed for port f8d891a8-bd8d-4886-8e99-8c67585a6ab3, please check neutron logs for more information. [ 677.970813] env[62923]: ERROR nova.compute.manager [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] [ 677.971261] env[62923]: DEBUG nova.compute.utils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Binding failed for port f8d891a8-bd8d-4886-8e99-8c67585a6ab3, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 677.971314] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.222s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.974951] env[62923]: DEBUG nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Build of instance f247b499-0a04-47ae-98b0-cb3f7f088a62 was re-scheduled: Binding failed for port f8d891a8-bd8d-4886-8e99-8c67585a6ab3, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 677.975407] env[62923]: DEBUG nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 677.975634] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Acquiring lock "refresh_cache-f247b499-0a04-47ae-98b0-cb3f7f088a62" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.975844] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Acquired lock "refresh_cache-f247b499-0a04-47ae-98b0-cb3f7f088a62" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.975995] env[62923]: DEBUG nova.network.neutron [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 678.025774] env[62923]: DEBUG nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 678.068201] env[62923]: DEBUG nova.virt.hardware [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:02:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='e6bd002d-500e-4587-8ba6-edd97affbbee',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-998734448',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 678.068474] env[62923]: DEBUG nova.virt.hardware [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 678.068639] env[62923]: DEBUG nova.virt.hardware [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 678.068820] env[62923]: DEBUG nova.virt.hardware [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 678.068961] env[62923]: DEBUG nova.virt.hardware [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 678.069119] env[62923]: DEBUG nova.virt.hardware [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 678.070442] env[62923]: DEBUG nova.virt.hardware [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 678.070442] env[62923]: DEBUG nova.virt.hardware [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 678.070442] env[62923]: DEBUG 
nova.virt.hardware [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 678.070442] env[62923]: DEBUG nova.virt.hardware [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 678.070442] env[62923]: DEBUG nova.virt.hardware [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 678.071038] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac8204a-89b8-4686-a593-6b56b8d8e06d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.079729] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64626580-fead-48db-b7db-37cc8b12b042 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.285701] env[62923]: ERROR nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 91c4c07c-bd13-4a45-8864-147fe772f374, please check neutron logs for more information. 
[ 678.285701] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 678.285701] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.285701] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 678.285701] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.285701] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 678.285701] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.285701] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 678.285701] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.285701] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 678.285701] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.285701] env[62923]: ERROR nova.compute.manager raise self.value [ 678.285701] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.285701] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 678.285701] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.285701] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 678.286242] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.286242] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 678.286242] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 91c4c07c-bd13-4a45-8864-147fe772f374, please check neutron logs for more information. 
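Every PortBindingFailed traceback in this section terminates in the same helper, _ensure_no_port_binding_failure at nova/network/neutron.py line 294, as the frames above show. Below is a minimal standalone sketch of that check. The local PortBindingFailed class and the sample port dict are illustrative stand-ins (upstream raises nova.exception.PortBindingFailed), and the 'binding:vif_type' == 'binding_failed' sentinel is, to the best of my knowledge, how Neutron reports that ML2 could not bind the port.

    # Standalone sketch of the check the tracebacks above hit at
    # nova/network/neutron.py:_ensure_no_port_binding_failure.
    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed.
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Neutron marks a failed ML2 bind by setting the port's
        # binding:vif_type attribute to the sentinel 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Sample port dict shaped like the one behind the records above:
    port = {'id': '91c4c07c-bd13-4a45-8864-147fe772f374',
            'binding:vif_type': 'binding_failed'}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # prints the same message the log records above carry

This is why the log says to check the neutron logs: Nova only sees the sentinel value on the port, not the underlying binding error.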
[ 678.286242] env[62923]: ERROR nova.compute.manager [ 678.286242] env[62923]: Traceback (most recent call last): [ 678.286242] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 678.286242] env[62923]: listener.cb(fileno) [ 678.286242] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.286242] env[62923]: result = function(*args, **kwargs) [ 678.286242] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 678.286242] env[62923]: return func(*args, **kwargs) [ 678.286242] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.286242] env[62923]: raise e [ 678.286242] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.286242] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 678.286242] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.286242] env[62923]: created_port_ids = self._update_ports_for_instance( [ 678.286242] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.286242] env[62923]: with excutils.save_and_reraise_exception(): [ 678.286242] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.286242] env[62923]: self.force_reraise() [ 678.286242] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.286242] env[62923]: raise self.value [ 678.286242] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.286242] env[62923]: updated_port = self._update_port( [ 678.286242] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.286242] env[62923]: _ensure_no_port_binding_failure(port) [ 678.286242] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.286242] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 678.287047] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 91c4c07c-bd13-4a45-8864-147fe772f374, please check neutron logs for more information. [ 678.287047] env[62923]: Removing descriptor: 21 [ 678.287047] env[62923]: ERROR nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 91c4c07c-bd13-4a45-8864-147fe772f374, please check neutron logs for more information. 
[ 678.287047] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Traceback (most recent call last): [ 678.287047] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 678.287047] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] yield resources [ 678.287047] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 678.287047] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] self.driver.spawn(context, instance, image_meta, [ 678.287047] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 678.287047] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 678.287047] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 678.287047] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] vm_ref = self.build_virtual_machine(instance, [ 678.287386] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 678.287386] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] vif_infos = vmwarevif.get_vif_info(self._session, [ 678.287386] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 678.287386] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] for vif in network_info: [ 678.287386] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 678.287386] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] return self._sync_wrapper(fn, *args, **kwargs) [ 678.287386] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 678.287386] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] self.wait() [ 678.287386] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 678.287386] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] self[:] = self._gt.wait() [ 678.287386] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 678.287386] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] return self._exit_event.wait() [ 678.287386] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 678.287758] env[62923]: ERROR 
nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] result = hub.switch() [ 678.287758] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 678.287758] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] return self.greenlet.switch() [ 678.287758] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.287758] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] result = function(*args, **kwargs) [ 678.287758] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 678.287758] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] return func(*args, **kwargs) [ 678.287758] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.287758] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] raise e [ 678.287758] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.287758] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] nwinfo = self.network_api.allocate_for_instance( [ 678.287758] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.287758] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] created_port_ids = self._update_ports_for_instance( [ 678.288105] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.288105] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] with excutils.save_and_reraise_exception(): [ 678.288105] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.288105] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] self.force_reraise() [ 678.288105] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.288105] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] raise self.value [ 678.288105] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.288105] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] updated_port = self._update_port( [ 678.288105] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.288105] 
env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] _ensure_no_port_binding_failure(port) [ 678.288105] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.288105] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] raise exception.PortBindingFailed(port_id=port['id']) [ 678.288438] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] nova.exception.PortBindingFailed: Binding failed for port 91c4c07c-bd13-4a45-8864-147fe772f374, please check neutron logs for more information. [ 678.288438] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] [ 678.288438] env[62923]: INFO nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Terminating instance [ 678.288836] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquiring lock "refresh_cache-91043784-2e4a-4fa4-87de-1c45971e64c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.288992] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquired lock "refresh_cache-91043784-2e4a-4fa4-87de-1c45971e64c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.289174] env[62923]: DEBUG nova.network.neutron [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 678.359271] env[62923]: DEBUG oslo_concurrency.lockutils [req-926ea087-317f-4e48-8da5-aaf7c7874b63 req-efef9210-0ec4-4d13-a3c7-5cd865ab970f service nova] Releasing lock "refresh_cache-9654e9d5-a809-4875-99bb-fd99d7a7fbd6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.359533] env[62923]: DEBUG nova.compute.manager [req-926ea087-317f-4e48-8da5-aaf7c7874b63 req-efef9210-0ec4-4d13-a3c7-5cd865ab970f service nova] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Received event network-vif-deleted-3c73ae97-c9b7-4453-be1f-a7271494dd19 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 678.496727] env[62923]: DEBUG nova.network.neutron [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.540025] env[62923]: DEBUG nova.network.neutron [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.803370] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3969e1f-6316-4363-b0f7-976a65066eba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.810853] env[62923]: DEBUG nova.network.neutron [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.813126] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b651b5bc-1ad0-47b4-a82c-dad63573e28a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.845894] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8219e21c-8bf3-41cd-a36c-8c650ae4bda3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.853038] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115ee8c8-4c80-474f-be8a-ae1c6f223218 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.865548] env[62923]: DEBUG nova.compute.provider_tree [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.882179] env[62923]: DEBUG nova.network.neutron [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.045894] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Releasing lock "refresh_cache-f247b499-0a04-47ae-98b0-cb3f7f088a62" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.045894] env[62923]: DEBUG nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 679.045894] env[62923]: DEBUG nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 679.045894] env[62923]: DEBUG nova.network.neutron [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 679.059928] env[62923]: DEBUG nova.network.neutron [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.227244] env[62923]: DEBUG nova.compute.manager [req-2105581e-45d5-42cc-a737-3bfcc0eb6c35 req-b714a70b-691c-4657-bb76-4eff33a318a6 service nova] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Received event network-changed-91c4c07c-bd13-4a45-8864-147fe772f374 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 679.227762] env[62923]: DEBUG nova.compute.manager [req-2105581e-45d5-42cc-a737-3bfcc0eb6c35 req-b714a70b-691c-4657-bb76-4eff33a318a6 service nova] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Refreshing instance network info cache due to event network-changed-91c4c07c-bd13-4a45-8864-147fe772f374. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 679.227963] env[62923]: DEBUG oslo_concurrency.lockutils [req-2105581e-45d5-42cc-a737-3bfcc0eb6c35 req-b714a70b-691c-4657-bb76-4eff33a318a6 service nova] Acquiring lock "refresh_cache-91043784-2e4a-4fa4-87de-1c45971e64c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.368392] env[62923]: DEBUG nova.scheduler.client.report [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 679.384039] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Releasing lock "refresh_cache-91043784-2e4a-4fa4-87de-1c45971e64c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.384449] env[62923]: DEBUG nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 679.384804] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 679.385174] env[62923]: DEBUG oslo_concurrency.lockutils [req-2105581e-45d5-42cc-a737-3bfcc0eb6c35 req-b714a70b-691c-4657-bb76-4eff33a318a6 service nova] Acquired lock "refresh_cache-91043784-2e4a-4fa4-87de-1c45971e64c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.385355] env[62923]: DEBUG nova.network.neutron [req-2105581e-45d5-42cc-a737-3bfcc0eb6c35 req-b714a70b-691c-4657-bb76-4eff33a318a6 service nova] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Refreshing network info cache for port 91c4c07c-bd13-4a45-8864-147fe772f374 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 679.386833] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57d51693-67d8-4927-a659-3363c550d8b9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.396117] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3674b9eb-b048-45f7-bf4d-62dd55e7d4c8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.417376] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 91043784-2e4a-4fa4-87de-1c45971e64c5 could not be found. [ 679.417579] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 679.417756] env[62923]: INFO nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Took 0.03 seconds to destroy the instance on the hypervisor. [ 679.417985] env[62923]: DEBUG oslo.service.loopingcall [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 679.418198] env[62923]: DEBUG nova.compute.manager [-] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 679.418289] env[62923]: DEBUG nova.network.neutron [-] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 679.431862] env[62923]: DEBUG nova.network.neutron [-] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.562104] env[62923]: DEBUG nova.network.neutron [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.873416] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.902s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.874110] env[62923]: ERROR nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port acc24265-4fb3-4bb2-a610-f5bdb7a2f198, please check neutron logs for more information. 
[ 679.874110] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Traceback (most recent call last): [ 679.874110] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 679.874110] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] self.driver.spawn(context, instance, image_meta, [ 679.874110] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 679.874110] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 679.874110] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 679.874110] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] vm_ref = self.build_virtual_machine(instance, [ 679.874110] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 679.874110] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] vif_infos = vmwarevif.get_vif_info(self._session, [ 679.874110] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 679.874423] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] for vif in network_info: [ 679.874423] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 679.874423] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] return self._sync_wrapper(fn, *args, **kwargs) [ 679.874423] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 679.874423] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] self.wait() [ 679.874423] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 679.874423] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] self[:] = self._gt.wait() [ 679.874423] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 679.874423] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] return self._exit_event.wait() [ 679.874423] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 679.874423] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] result = hub.switch() [ 679.874423] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
679.874423] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] return self.greenlet.switch() [ 679.874725] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 679.874725] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] result = function(*args, **kwargs) [ 679.874725] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 679.874725] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] return func(*args, **kwargs) [ 679.874725] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 679.874725] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] raise e [ 679.874725] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 679.874725] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] nwinfo = self.network_api.allocate_for_instance( [ 679.874725] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 679.874725] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] created_port_ids = self._update_ports_for_instance( [ 679.874725] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 679.874725] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] with excutils.save_and_reraise_exception(): [ 679.874725] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.875053] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] self.force_reraise() [ 679.875053] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.875053] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] raise self.value [ 679.875053] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 679.875053] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] updated_port = self._update_port( [ 679.875053] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.875053] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] _ensure_no_port_binding_failure(port) [ 679.875053] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 679.875053] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] raise exception.PortBindingFailed(port_id=port['id']) [ 679.875053] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] nova.exception.PortBindingFailed: Binding failed for port acc24265-4fb3-4bb2-a610-f5bdb7a2f198, please check neutron logs for more information. [ 679.875053] env[62923]: ERROR nova.compute.manager [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] [ 679.875317] env[62923]: DEBUG nova.compute.utils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Binding failed for port acc24265-4fb3-4bb2-a610-f5bdb7a2f198, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 679.876041] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.264s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.878794] env[62923]: DEBUG nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Build of instance bb28249e-a9d3-4d7d-bd05-128f1110dbca was re-scheduled: Binding failed for port acc24265-4fb3-4bb2-a610-f5bdb7a2f198, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 679.879206] env[62923]: DEBUG nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 679.879421] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Acquiring lock "refresh_cache-bb28249e-a9d3-4d7d-bd05-128f1110dbca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.879562] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Acquired lock "refresh_cache-bb28249e-a9d3-4d7d-bd05-128f1110dbca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.879718] env[62923]: DEBUG nova.network.neutron [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.905415] env[62923]: DEBUG nova.network.neutron [req-2105581e-45d5-42cc-a737-3bfcc0eb6c35 req-b714a70b-691c-4657-bb76-4eff33a318a6 service nova] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.934064] env[62923]: DEBUG nova.network.neutron [-] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.975944] env[62923]: DEBUG nova.network.neutron [req-2105581e-45d5-42cc-a737-3bfcc0eb6c35 req-b714a70b-691c-4657-bb76-4eff33a318a6 service nova] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.064492] env[62923]: INFO nova.compute.manager [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] [instance: f247b499-0a04-47ae-98b0-cb3f7f088a62] Took 1.02 seconds to deallocate network for instance. [ 680.400969] env[62923]: DEBUG nova.network.neutron [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.437605] env[62923]: INFO nova.compute.manager [-] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Took 1.02 seconds to deallocate network for instance. 
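The excutils.py frames that recur in each traceback above (force_reraise() and "raise self.value") come from oslo.utils' save_and_reraise_exception context manager, which _update_ports_for_instance uses so it can clean up partially created ports and still propagate the original error. A short runnable sketch of that pattern follows; the exception class and the cleanup step are stand-ins for illustration.

    from oslo_utils import excutils  # the library shown in the tracebacks

    class PortBindingFailed(Exception):
        pass  # stand-in for nova.exception.PortBindingFailed

    def update_ports():
        try:
            raise PortBindingFailed('Binding failed for port <id>')
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; on exit the context manager re-raises
                # the saved exception, producing the force_reraise() /
                # "raise self.value" frames seen in the tracebacks above.
                print('cleaning up partially created ports')

    try:
        update_ports()
    except PortBindingFailed as exc:
        print('re-raised:', exc)

The net effect, visible in the surrounding records, is that the original PortBindingFailed survives the cleanup path, the claim is aborted under the "compute_resources" lock, and the build is re-scheduled.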
[ 680.441912] env[62923]: DEBUG nova.compute.claims [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 680.442123] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.478414] env[62923]: DEBUG oslo_concurrency.lockutils [req-2105581e-45d5-42cc-a737-3bfcc0eb6c35 req-b714a70b-691c-4657-bb76-4eff33a318a6 service nova] Releasing lock "refresh_cache-91043784-2e4a-4fa4-87de-1c45971e64c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.478671] env[62923]: DEBUG nova.compute.manager [req-2105581e-45d5-42cc-a737-3bfcc0eb6c35 req-b714a70b-691c-4657-bb76-4eff33a318a6 service nova] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Received event network-vif-deleted-91c4c07c-bd13-4a45-8864-147fe772f374 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 680.481584] env[62923]: DEBUG nova.network.neutron [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.699455] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf004118-5ad4-4c7d-9f59-93d83c29385f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.706865] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e87472-3402-4ed8-ae47-b443f7e306f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.736894] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3932bf-acd8-4e75-895f-02c9a64f5aea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.743940] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834ab4c4-320d-4688-bf6d-d6e6e85be294 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.758712] env[62923]: DEBUG nova.compute.provider_tree [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.984721] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Releasing lock 
"refresh_cache-bb28249e-a9d3-4d7d-bd05-128f1110dbca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.985035] env[62923]: DEBUG nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 680.985291] env[62923]: DEBUG nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 680.985500] env[62923]: DEBUG nova.network.neutron [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 681.001035] env[62923]: DEBUG nova.network.neutron [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.096970] env[62923]: INFO nova.scheduler.client.report [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Deleted allocations for instance f247b499-0a04-47ae-98b0-cb3f7f088a62 [ 681.262032] env[62923]: DEBUG nova.scheduler.client.report [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 681.503648] env[62923]: DEBUG nova.network.neutron [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.607434] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d1dd4a9a-c5fd-48a3-afbb-2177e8e2edb0 tempest-ServerExternalEventsTest-1156490165 tempest-ServerExternalEventsTest-1156490165-project-member] Lock "f247b499-0a04-47ae-98b0-cb3f7f088a62" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
107.110s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.767053] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.891s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.767898] env[62923]: ERROR nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3e76600a-59fc-4233-bb90-a265aa02f9f8, please check neutron logs for more information. [ 681.767898] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Traceback (most recent call last): [ 681.767898] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 681.767898] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] self.driver.spawn(context, instance, image_meta, [ 681.767898] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 681.767898] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 681.767898] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 681.767898] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] vm_ref = self.build_virtual_machine(instance, [ 681.767898] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 681.767898] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] vif_infos = vmwarevif.get_vif_info(self._session, [ 681.767898] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 681.768250] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] for vif in network_info: [ 681.768250] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 681.768250] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] return self._sync_wrapper(fn, *args, **kwargs) [ 681.768250] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 681.768250] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] self.wait() [ 681.768250] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/model.py", line 
637, in wait [ 681.768250] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] self[:] = self._gt.wait() [ 681.768250] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 681.768250] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] return self._exit_event.wait() [ 681.768250] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 681.768250] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] result = hub.switch() [ 681.768250] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 681.768250] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] return self.greenlet.switch() [ 681.768614] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 681.768614] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] result = function(*args, **kwargs) [ 681.768614] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 681.768614] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] return func(*args, **kwargs) [ 681.768614] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 681.768614] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] raise e [ 681.768614] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 681.768614] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] nwinfo = self.network_api.allocate_for_instance( [ 681.768614] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 681.768614] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] created_port_ids = self._update_ports_for_instance( [ 681.768614] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 681.768614] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] with excutils.save_and_reraise_exception(): [ 681.768614] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.768975] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] self.force_reraise() [ 681.768975] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.768975] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] raise self.value [ 681.768975] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 681.768975] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] updated_port = self._update_port( [ 681.768975] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.768975] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] _ensure_no_port_binding_failure(port) [ 681.768975] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.768975] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] raise exception.PortBindingFailed(port_id=port['id']) [ 681.768975] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] nova.exception.PortBindingFailed: Binding failed for port 3e76600a-59fc-4233-bb90-a265aa02f9f8, please check neutron logs for more information. [ 681.768975] env[62923]: ERROR nova.compute.manager [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] [ 681.769303] env[62923]: DEBUG nova.compute.utils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Binding failed for port 3e76600a-59fc-4233-bb90-a265aa02f9f8, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 681.770114] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.541s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.773055] env[62923]: DEBUG nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Build of instance 0c569bd2-7622-4285-9439-209a88f2e84d was re-scheduled: Binding failed for port 3e76600a-59fc-4233-bb90-a265aa02f9f8, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 681.773611] env[62923]: DEBUG nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 681.773837] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Acquiring lock "refresh_cache-0c569bd2-7622-4285-9439-209a88f2e84d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.774011] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Acquired lock "refresh_cache-0c569bd2-7622-4285-9439-209a88f2e84d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.774235] env[62923]: DEBUG nova.network.neutron [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 682.006835] env[62923]: INFO nova.compute.manager [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] [instance: bb28249e-a9d3-4d7d-bd05-128f1110dbca] Took 1.02 seconds to deallocate network for instance. [ 682.110409] env[62923]: DEBUG nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 682.302142] env[62923]: DEBUG nova.network.neutron [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.406516] env[62923]: DEBUG nova.network.neutron [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.631515] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.649271] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a99806-e64c-45b6-a838-ab26b88794fa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.656997] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60407a3a-b974-4cbf-8d26-512b9e0197e4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.688104] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5087c5ab-b3d1-4464-9458-add888869520 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.696055] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34dbcc56-5fb7-41f0-b716-2b0144d4b10d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.709117] env[62923]: DEBUG nova.compute.provider_tree [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.909302] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Releasing lock "refresh_cache-0c569bd2-7622-4285-9439-209a88f2e84d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.909535] env[62923]: DEBUG nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
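The Acquiring / Acquired / Releasing triplets around the refresh_cache-<uuid> and compute_resources locks, with the waited/held timings, are emitted by oslo.concurrency's lockutils. A minimal sketch of the two usage patterns (the lock names are taken from the log; the function bodies are placeholders):

    from oslo_concurrency import lockutils

    # Context-manager form, as used for the per-instance refresh_cache lock.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance's network info cache under the lock

    # Decorator form: every function synchronized on the same name
    # serializes, which is why instance_claim and abort_instance_claim
    # above queue up on "compute_resources" for 14+ seconds.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        pass  # claim VCPU/MEMORY_MB/DISK_GB for the instance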
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 682.909823] env[62923]: DEBUG nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 682.910485] env[62923]: DEBUG nova.network.neutron [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 682.931744] env[62923]: DEBUG nova.network.neutron [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 683.038838] env[62923]: INFO nova.scheduler.client.report [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Deleted allocations for instance bb28249e-a9d3-4d7d-bd05-128f1110dbca [ 683.212386] env[62923]: DEBUG nova.scheduler.client.report [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 683.436692] env[62923]: DEBUG nova.network.neutron [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.548012] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2eda5f35-2a36-4e3c-9689-852075684c01 tempest-ServerMetadataNegativeTestJSON-2091143217 tempest-ServerMetadataNegativeTestJSON-2091143217-project-member] Lock "bb28249e-a9d3-4d7d-bd05-128f1110dbca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.992s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.717932] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.948s {{(pid=62923) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.718720] env[62923]: ERROR nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6c8e867c-65d6-4982-a2d7-2c382f37782f, please check neutron logs for more information. [ 683.718720] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Traceback (most recent call last): [ 683.718720] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 683.718720] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] self.driver.spawn(context, instance, image_meta, [ 683.718720] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 683.718720] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 683.718720] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 683.718720] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] vm_ref = self.build_virtual_machine(instance, [ 683.718720] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 683.718720] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] vif_infos = vmwarevif.get_vif_info(self._session, [ 683.718720] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 683.719096] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] for vif in network_info: [ 683.719096] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 683.719096] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] return self._sync_wrapper(fn, *args, **kwargs) [ 683.719096] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 683.719096] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] self.wait() [ 683.719096] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 683.719096] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] self[:] = self._gt.wait() [ 683.719096] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 683.719096] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] return self._exit_event.wait() [ 683.719096] 
env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 683.719096] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] current.throw(*self._exc) [ 683.719096] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 683.719096] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] result = function(*args, **kwargs) [ 683.719418] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 683.719418] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] return func(*args, **kwargs) [ 683.719418] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 683.719418] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] raise e [ 683.719418] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 683.719418] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] nwinfo = self.network_api.allocate_for_instance( [ 683.719418] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 683.719418] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] created_port_ids = self._update_ports_for_instance( [ 683.719418] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 683.719418] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] with excutils.save_and_reraise_exception(): [ 683.719418] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 683.719418] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] self.force_reraise() [ 683.719418] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 683.719811] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] raise self.value [ 683.719811] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 683.719811] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] updated_port = self._update_port( [ 683.719811] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 683.719811] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] 
_ensure_no_port_binding_failure(port) [ 683.719811] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 683.719811] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] raise exception.PortBindingFailed(port_id=port['id']) [ 683.719811] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] nova.exception.PortBindingFailed: Binding failed for port 6c8e867c-65d6-4982-a2d7-2c382f37782f, please check neutron logs for more information. [ 683.719811] env[62923]: ERROR nova.compute.manager [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] [ 683.719811] env[62923]: DEBUG nova.compute.utils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Binding failed for port 6c8e867c-65d6-4982-a2d7-2c382f37782f, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 683.720667] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.860s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.723668] env[62923]: DEBUG nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Build of instance ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6 was re-scheduled: Binding failed for port 6c8e867c-65d6-4982-a2d7-2c382f37782f, please check neutron logs for more information. 
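This second traceback differs from the first in one detail: it passes through event.py's current.throw(*self._exc) rather than hub.switch(), meaning the PortBindingFailed was raised in the background greenthread that _allocate_network_async spawned earlier, stored on its exit event, and only re-raised once the VIF iteration called wait(). A minimal eventlet sketch of that deferred re-raise (RuntimeError stands in for the Nova exception):

    import eventlet

    def allocate_network_async():
        # Runs in a background greenthread; raising here does not abort the
        # build immediately. The exception is stored on the greenthread.
        raise RuntimeError('binding failed')

    gt = eventlet.spawn(allocate_network_async)
    # ... the build continues until it actually needs the network info ...
    try:
        gt.wait()  # re-raises the stored exception in the waiting thread
    except RuntimeError as exc:
        print('surfaced at wait():', exc)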
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 683.724233] env[62923]: DEBUG nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 683.724488] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "refresh_cache-ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.725204] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "refresh_cache-ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.725204] env[62923]: DEBUG nova.network.neutron [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 683.939562] env[62923]: INFO nova.compute.manager [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] [instance: 0c569bd2-7622-4285-9439-209a88f2e84d] Took 1.03 seconds to deallocate network for instance. [ 684.051659] env[62923]: DEBUG nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 684.105421] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Acquiring lock "db26908c-6aa6-47b8-a3c4-461247e36d85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.105662] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Lock "db26908c-6aa6-47b8-a3c4-461247e36d85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.260136] env[62923]: DEBUG nova.network.neutron [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.347604] env[62923]: DEBUG nova.network.neutron [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.589172] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.650055] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff1faed-fb97-44a0-a02a-192cb31feda1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.655618] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3af540-a904-42d4-a53a-89ef5aad975a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.693052] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f61c91-3773-465c-8fe1-e46a05ce875a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.698143] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88dd1989-8237-4aac-b380-f8d2b9c17555 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.714108] env[62923]: DEBUG nova.compute.provider_tree [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.851972] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "refresh_cache-ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.852161] env[62923]: DEBUG nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 684.852365] env[62923]: DEBUG nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 684.852532] env[62923]: DEBUG nova.network.neutron [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 684.866883] env[62923]: DEBUG nova.network.neutron [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.980277] env[62923]: INFO nova.scheduler.client.report [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Deleted allocations for instance 0c569bd2-7622-4285-9439-209a88f2e84d [ 685.217564] env[62923]: DEBUG nova.scheduler.client.report [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 685.372716] env[62923]: DEBUG nova.network.neutron [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.489938] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c7bb7320-855c-4c1f-989b-95e89d5c2d31 tempest-ServerDiagnosticsNegativeTest-1223502785 tempest-ServerDiagnosticsNegativeTest-1223502785-project-member] Lock "0c569bd2-7622-4285-9439-209a88f2e84d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.904s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.722407] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.002s {{(pid=62923) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.723052] env[62923]: ERROR nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f8dae359-ce8c-428d-9d45-0b8b70f24e5f, please check neutron logs for more information. [ 685.723052] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Traceback (most recent call last): [ 685.723052] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 685.723052] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] self.driver.spawn(context, instance, image_meta, [ 685.723052] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 685.723052] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] self._vmops.spawn(context, instance, image_meta, injected_files, [ 685.723052] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 685.723052] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] vm_ref = self.build_virtual_machine(instance, [ 685.723052] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 685.723052] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] vif_infos = vmwarevif.get_vif_info(self._session, [ 685.723052] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 685.723373] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] for vif in network_info: [ 685.723373] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 685.723373] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] return self._sync_wrapper(fn, *args, **kwargs) [ 685.723373] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 685.723373] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] self.wait() [ 685.723373] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 685.723373] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] self[:] = self._gt.wait() [ 685.723373] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 685.723373] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] return self._exit_event.wait() [ 685.723373] 
env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 685.723373] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] result = hub.switch() [ 685.723373] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 685.723373] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] return self.greenlet.switch() [ 685.723738] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 685.723738] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] result = function(*args, **kwargs) [ 685.723738] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 685.723738] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] return func(*args, **kwargs) [ 685.723738] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 685.723738] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] raise e [ 685.723738] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 685.723738] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] nwinfo = self.network_api.allocate_for_instance( [ 685.723738] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 685.723738] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] created_port_ids = self._update_ports_for_instance( [ 685.723738] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 685.723738] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] with excutils.save_and_reraise_exception(): [ 685.723738] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 685.724080] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] self.force_reraise() [ 685.724080] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 685.724080] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] raise self.value [ 685.724080] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 685.724080] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] updated_port 
= self._update_port( [ 685.724080] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 685.724080] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] _ensure_no_port_binding_failure(port) [ 685.724080] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 685.724080] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] raise exception.PortBindingFailed(port_id=port['id']) [ 685.724080] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] nova.exception.PortBindingFailed: Binding failed for port f8dae359-ce8c-428d-9d45-0b8b70f24e5f, please check neutron logs for more information. [ 685.724080] env[62923]: ERROR nova.compute.manager [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] [ 685.724359] env[62923]: DEBUG nova.compute.utils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Binding failed for port f8dae359-ce8c-428d-9d45-0b8b70f24e5f, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 685.725423] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.347s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.726744] env[62923]: INFO nova.compute.claims [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 685.730116] env[62923]: DEBUG nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Build of instance 7ab06c90-5d19-43fa-b91b-7d17f85d3258 was re-scheduled: Binding failed for port f8dae359-ce8c-428d-9d45-0b8b70f24e5f, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 685.730557] env[62923]: DEBUG nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 685.730794] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "refresh_cache-7ab06c90-5d19-43fa-b91b-7d17f85d3258" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.731564] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "refresh_cache-7ab06c90-5d19-43fa-b91b-7d17f85d3258" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.731564] env[62923]: DEBUG nova.network.neutron [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 685.875513] env[62923]: INFO nova.compute.manager [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6] Took 1.02 seconds to deallocate network for instance. [ 685.993110] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 686.250494] env[62923]: DEBUG nova.network.neutron [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.344891] env[62923]: DEBUG nova.network.neutron [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.518353] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.851441] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "refresh_cache-7ab06c90-5d19-43fa-b91b-7d17f85d3258" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.851441] env[62923]: DEBUG nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 686.851441] env[62923]: DEBUG nova.compute.manager [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 686.851441] env[62923]: DEBUG nova.network.neutron [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 686.877865] env[62923]: DEBUG nova.network.neutron [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.903081] env[62923]: INFO nova.scheduler.client.report [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Deleted allocations for instance ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6 [ 687.082045] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39bece9d-f844-49db-a373-5f7c3dbc47ba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.088506] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b7ea75-6adc-46f3-bb20-5dd561c7fd8b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.118792] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e028abec-8bf4-490f-92ec-389e8bea33b1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.127299] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13399ef-2332-4169-b538-5f1c9388fcfd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.143007] env[62923]: DEBUG nova.compute.provider_tree [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.380330] env[62923]: DEBUG nova.network.neutron [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.412319] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f5410637-17dd-41d5-8c23-30f73a1958b3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "ab6d68e8-d1c0-429d-a99b-6c40d66fb6d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.127s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.646237] env[62923]: DEBUG nova.scheduler.client.report [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 687.885356] env[62923]: INFO nova.compute.manager [None 
req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 7ab06c90-5d19-43fa-b91b-7d17f85d3258] Took 1.04 seconds to deallocate network for instance. [ 687.915162] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 688.154920] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.430s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.155448] env[62923]: DEBUG nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 688.158509] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.942s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.159901] env[62923]: INFO nova.compute.claims [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 688.443276] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.665078] env[62923]: DEBUG nova.compute.utils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 688.666479] env[62923]: DEBUG nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Allocating IP information in the background. 
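The inventory dict reported unchanged several times above for provider a513b783-544c-421b-85ec-cfd6d6ee698d is what these claims are checked against. Placement's usable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation; a worked check against the logged values:

    # Values copied from the "Inventory has not changed" records above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 148},
    }
    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: %g usable, max %d per allocation' % (rc, usable, inv['max_unit']))
    # Prints: VCPU 192 usable, MEMORY_MB 196078 usable, DISK_GB 400 usable.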
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 688.666649] env[62923]: DEBUG nova.network.neutron [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 688.774317] env[62923]: DEBUG nova.policy [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f4a85a64c5045c2a64e3fab12c5d673', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5d530fb253140899a6611ba9b6339cd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 688.924715] env[62923]: INFO nova.scheduler.client.report [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Deleted allocations for instance 7ab06c90-5d19-43fa-b91b-7d17f85d3258 [ 689.173527] env[62923]: DEBUG nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 689.305677] env[62923]: DEBUG nova.network.neutron [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Successfully created port: fd41eba7-2ea3-4c7e-9541-07f6f6144c1d {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 689.434092] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c44976f5-ddc9-4ea5-b79a-428bea031118 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "7ab06c90-5d19-43fa-b91b-7d17f85d3258" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.554s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.488841] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "83ead303-c5b9-4600-935b-fa1a77689dcf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.489120] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "83ead303-c5b9-4600-935b-fa1a77689dcf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.581149] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a4f7d1-2175-4ed2-a89e-670c08b577e0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.589211] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1aebd0-1e65-4447-a465-52ad9e0cece8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.625884] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58040e2-e0ff-4eb5-8e10-51f28fa0b2d2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.634936] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e402da-c00e-4520-a1b6-10bffce9ebce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.649837] env[62923]: DEBUG nova.compute.provider_tree [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.939684] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 690.152563] env[62923]: DEBUG nova.scheduler.client.report [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 690.188731] env[62923]: DEBUG nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 690.221382] env[62923]: DEBUG nova.virt.hardware [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 690.221623] env[62923]: DEBUG nova.virt.hardware [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 690.221775] env[62923]: DEBUG nova.virt.hardware [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 690.221949] env[62923]: DEBUG nova.virt.hardware [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 690.222100] env[62923]: DEBUG nova.virt.hardware [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 690.222309] env[62923]: DEBUG nova.virt.hardware [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 690.222531] env[62923]: DEBUG nova.virt.hardware [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 690.222684] env[62923]: DEBUG nova.virt.hardware [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 690.222845] env[62923]: DEBUG nova.virt.hardware [None 
req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 690.223112] env[62923]: DEBUG nova.virt.hardware [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 690.223170] env[62923]: DEBUG nova.virt.hardware [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 690.224052] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0c22e2-3523-4280-bc67-cdce3f4b5b6e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.232402] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bce663-c597-42d5-808f-4b16c44bd396 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.466393] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.593052] env[62923]: DEBUG nova.compute.manager [req-e49f0eb4-74fe-45db-99ad-8efe7c8e090f req-b6780461-7202-413d-882c-681cdb7c5e6a service nova] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Received event network-changed-fd41eba7-2ea3-4c7e-9541-07f6f6144c1d {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 690.593327] env[62923]: DEBUG nova.compute.manager [req-e49f0eb4-74fe-45db-99ad-8efe7c8e090f req-b6780461-7202-413d-882c-681cdb7c5e6a service nova] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Refreshing instance network info cache due to event network-changed-fd41eba7-2ea3-4c7e-9541-07f6f6144c1d. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 690.593827] env[62923]: DEBUG oslo_concurrency.lockutils [req-e49f0eb4-74fe-45db-99ad-8efe7c8e090f req-b6780461-7202-413d-882c-681cdb7c5e6a service nova] Acquiring lock "refresh_cache-13e71116-cb20-4fc5-8ceb-3a6098bae438" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.593827] env[62923]: DEBUG oslo_concurrency.lockutils [req-e49f0eb4-74fe-45db-99ad-8efe7c8e090f req-b6780461-7202-413d-882c-681cdb7c5e6a service nova] Acquired lock "refresh_cache-13e71116-cb20-4fc5-8ceb-3a6098bae438" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.593984] env[62923]: DEBUG nova.network.neutron [req-e49f0eb4-74fe-45db-99ad-8efe7c8e090f req-b6780461-7202-413d-882c-681cdb7c5e6a service nova] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Refreshing network info cache for port fd41eba7-2ea3-4c7e-9541-07f6f6144c1d {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 690.658233] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.500s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.659060] env[62923]: DEBUG nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 690.661797] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.208s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.789817] env[62923]: ERROR nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fd41eba7-2ea3-4c7e-9541-07f6f6144c1d, please check neutron logs for more information. 
[ 690.789817] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 690.789817] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 690.789817] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 690.789817] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 690.789817] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 690.789817] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 690.789817] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 690.789817] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.789817] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 690.789817] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.789817] env[62923]: ERROR nova.compute.manager raise self.value [ 690.789817] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 690.789817] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 690.789817] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.789817] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 690.790325] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.790325] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 690.790325] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fd41eba7-2ea3-4c7e-9541-07f6f6144c1d, please check neutron logs for more information. 
[ 690.790325] env[62923]: ERROR nova.compute.manager [ 690.790325] env[62923]: Traceback (most recent call last): [ 690.790325] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 690.790325] env[62923]: listener.cb(fileno) [ 690.790325] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 690.790325] env[62923]: result = function(*args, **kwargs) [ 690.790325] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 690.790325] env[62923]: return func(*args, **kwargs) [ 690.790325] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 690.790325] env[62923]: raise e [ 690.790325] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 690.790325] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 690.790325] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 690.790325] env[62923]: created_port_ids = self._update_ports_for_instance( [ 690.790325] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 690.790325] env[62923]: with excutils.save_and_reraise_exception(): [ 690.790325] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.790325] env[62923]: self.force_reraise() [ 690.790325] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.790325] env[62923]: raise self.value [ 690.790325] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 690.790325] env[62923]: updated_port = self._update_port( [ 690.790325] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.790325] env[62923]: _ensure_no_port_binding_failure(port) [ 690.790325] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.790325] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 690.791028] env[62923]: nova.exception.PortBindingFailed: Binding failed for port fd41eba7-2ea3-4c7e-9541-07f6f6144c1d, please check neutron logs for more information. [ 690.791028] env[62923]: Removing descriptor: 21 [ 690.791028] env[62923]: ERROR nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fd41eba7-2ea3-4c7e-9541-07f6f6144c1d, please check neutron logs for more information. 
[ 690.791028] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Traceback (most recent call last): [ 690.791028] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 690.791028] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] yield resources [ 690.791028] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 690.791028] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] self.driver.spawn(context, instance, image_meta, [ 690.791028] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 690.791028] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] self._vmops.spawn(context, instance, image_meta, injected_files, [ 690.791028] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 690.791028] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] vm_ref = self.build_virtual_machine(instance, [ 690.791365] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 690.791365] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] vif_infos = vmwarevif.get_vif_info(self._session, [ 690.791365] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 690.791365] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] for vif in network_info: [ 690.791365] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 690.791365] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] return self._sync_wrapper(fn, *args, **kwargs) [ 690.791365] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 690.791365] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] self.wait() [ 690.791365] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 690.791365] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] self[:] = self._gt.wait() [ 690.791365] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 690.791365] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] return self._exit_event.wait() [ 690.791365] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 690.791685] env[62923]: ERROR 
nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] result = hub.switch() [ 690.791685] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 690.791685] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] return self.greenlet.switch() [ 690.791685] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 690.791685] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] result = function(*args, **kwargs) [ 690.791685] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 690.791685] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] return func(*args, **kwargs) [ 690.791685] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 690.791685] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] raise e [ 690.791685] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 690.791685] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] nwinfo = self.network_api.allocate_for_instance( [ 690.791685] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 690.791685] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] created_port_ids = self._update_ports_for_instance( [ 690.791995] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 690.791995] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] with excutils.save_and_reraise_exception(): [ 690.791995] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.791995] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] self.force_reraise() [ 690.791995] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.791995] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] raise self.value [ 690.791995] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 690.791995] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] updated_port = self._update_port( [ 690.791995] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.791995] 
env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] _ensure_no_port_binding_failure(port) [ 690.791995] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.791995] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] raise exception.PortBindingFailed(port_id=port['id']) [ 690.792310] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] nova.exception.PortBindingFailed: Binding failed for port fd41eba7-2ea3-4c7e-9541-07f6f6144c1d, please check neutron logs for more information. [ 690.792310] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] [ 690.792310] env[62923]: INFO nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Terminating instance [ 690.792310] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Acquiring lock "refresh_cache-13e71116-cb20-4fc5-8ceb-3a6098bae438" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.119266] env[62923]: DEBUG nova.network.neutron [req-e49f0eb4-74fe-45db-99ad-8efe7c8e090f req-b6780461-7202-413d-882c-681cdb7c5e6a service nova] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 691.168493] env[62923]: DEBUG nova.compute.utils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 691.174921] env[62923]: DEBUG nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 691.176588] env[62923]: DEBUG nova.network.neutron [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 691.285668] env[62923]: DEBUG nova.policy [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10e6ab2f8c50450d9ce9c06079726912', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb4fde1412c240b288e7337a06fae728', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 691.288616] env[62923]: DEBUG nova.network.neutron [req-e49f0eb4-74fe-45db-99ad-8efe7c8e090f req-b6780461-7202-413d-882c-681cdb7c5e6a service nova] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.560147] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37bc554e-28c7-4e54-aaed-8e8cf42fa8ae {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.574662] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ad24b9-d476-43b2-9a94-563e5de6f490 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.609261] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c7375e-4d43-430c-aa3c-f3e3a9a22bd6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.616933] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12247a96-e144-410c-9518-28387f90b3c8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.631066] env[62923]: DEBUG nova.compute.provider_tree [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.675628] env[62923]: DEBUG nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 691.790543] env[62923]: DEBUG oslo_concurrency.lockutils [req-e49f0eb4-74fe-45db-99ad-8efe7c8e090f req-b6780461-7202-413d-882c-681cdb7c5e6a service nova] Releasing lock "refresh_cache-13e71116-cb20-4fc5-8ceb-3a6098bae438" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.792494] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Acquired lock "refresh_cache-13e71116-cb20-4fc5-8ceb-3a6098bae438" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.792494] env[62923]: DEBUG nova.network.neutron [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 691.828534] env[62923]: DEBUG nova.network.neutron [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Successfully created port: 30f00255-95f5-4dea-9292-b132c6e7cc70 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 692.135515] env[62923]: DEBUG nova.scheduler.client.report [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 692.332188] env[62923]: DEBUG nova.network.neutron [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.354094] env[62923]: DEBUG nova.network.neutron [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Successfully created port: b14bd509-125d-41c0-9638-40c229831f56 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 692.522180] env[62923]: DEBUG nova.network.neutron [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.642859] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.979s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.642859] env[62923]: ERROR nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b73a1f92-867c-4fcc-8ca9-271006fb7769, please check neutron logs for more information. [ 692.642859] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Traceback (most recent call last): [ 692.642859] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 692.642859] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] self.driver.spawn(context, instance, image_meta, [ 692.642859] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 692.642859] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] self._vmops.spawn(context, instance, image_meta, injected_files, [ 692.642859] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 692.642859] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] vm_ref = self.build_virtual_machine(instance, [ 692.643209] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 692.643209] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] vif_infos = vmwarevif.get_vif_info(self._session, [ 692.643209] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 692.643209] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] for vif in network_info: [ 692.643209] 
env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 692.643209] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] return self._sync_wrapper(fn, *args, **kwargs) [ 692.643209] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 692.643209] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] self.wait() [ 692.643209] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 692.643209] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] self[:] = self._gt.wait() [ 692.643209] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 692.643209] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] return self._exit_event.wait() [ 692.643209] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 692.643515] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] result = hub.switch() [ 692.643515] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 692.643515] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] return self.greenlet.switch() [ 692.643515] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 692.643515] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] result = function(*args, **kwargs) [ 692.643515] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 692.643515] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] return func(*args, **kwargs) [ 692.643515] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 692.643515] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] raise e [ 692.643515] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 692.643515] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] nwinfo = self.network_api.allocate_for_instance( [ 692.643515] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 692.643515] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] created_port_ids = self._update_ports_for_instance( [ 692.643850] env[62923]: ERROR nova.compute.manager 
[instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 692.643850] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] with excutils.save_and_reraise_exception(): [ 692.643850] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.643850] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] self.force_reraise() [ 692.643850] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.643850] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] raise self.value [ 692.643850] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 692.643850] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] updated_port = self._update_port( [ 692.643850] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.643850] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] _ensure_no_port_binding_failure(port) [ 692.643850] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 692.643850] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] raise exception.PortBindingFailed(port_id=port['id']) [ 692.644168] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] nova.exception.PortBindingFailed: Binding failed for port b73a1f92-867c-4fcc-8ca9-271006fb7769, please check neutron logs for more information. [ 692.644168] env[62923]: ERROR nova.compute.manager [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] [ 692.644168] env[62923]: DEBUG nova.compute.utils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Binding failed for port b73a1f92-867c-4fcc-8ca9-271006fb7769, please check neutron logs for more information. 
{{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 692.644168] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.864s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.647568] env[62923]: DEBUG nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Build of instance 7559e4b7-1cfe-438e-8a14-a964c1a76d52 was re-scheduled: Binding failed for port b73a1f92-867c-4fcc-8ca9-271006fb7769, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 692.648655] env[62923]: DEBUG nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 692.648991] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquiring lock "refresh_cache-7559e4b7-1cfe-438e-8a14-a964c1a76d52" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.649221] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquired lock "refresh_cache-7559e4b7-1cfe-438e-8a14-a964c1a76d52" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.649487] env[62923]: DEBUG nova.network.neutron [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.685674] env[62923]: DEBUG nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 692.714579] env[62923]: DEBUG nova.virt.hardware [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 692.714579] env[62923]: DEBUG nova.virt.hardware [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 692.714579] env[62923]: DEBUG nova.virt.hardware [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 692.714579] env[62923]: DEBUG nova.virt.hardware [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 692.714768] env[62923]: DEBUG nova.virt.hardware [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 692.714768] env[62923]: DEBUG nova.virt.hardware [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 692.714768] env[62923]: DEBUG nova.virt.hardware [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 692.714768] env[62923]: DEBUG nova.virt.hardware [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 692.714768] env[62923]: DEBUG nova.virt.hardware [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 
tempest-ServersTestMultiNic-5523554-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 692.719021] env[62923]: DEBUG nova.virt.hardware [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 692.719021] env[62923]: DEBUG nova.virt.hardware [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 692.719021] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a1c816-bb9a-4dc2-b046-431b415367bb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.727058] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1f235a-a1b7-4464-840c-d3f416d00748 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.758885] env[62923]: DEBUG nova.compute.manager [req-dd580390-5b2d-4a23-ab3c-3d0a747ccc89 req-eae35120-2122-4a10-9c1f-8c2347197041 service nova] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Received event network-vif-deleted-fd41eba7-2ea3-4c7e-9541-07f6f6144c1d {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 692.853125] env[62923]: DEBUG nova.network.neutron [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Successfully created port: 42fccb14-8250-45eb-bfb2-081fe4d437b5 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 692.873301] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "c2e48555-68b5-4ed0-8ad6-a87833538df8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.873301] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "c2e48555-68b5-4ed0-8ad6-a87833538df8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.027490] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Releasing lock "refresh_cache-13e71116-cb20-4fc5-8ceb-3a6098bae438" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.027490] env[62923]: DEBUG nova.compute.manager [None
req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 693.027490] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 693.028191] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29d39c9f-bde7-4ef6-b145-0428a01bf774 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.039515] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ef6e87-a650-49de-88e3-51558fc281c0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.070985] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 13e71116-cb20-4fc5-8ceb-3a6098bae438 could not be found. [ 693.071388] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 693.071708] env[62923]: INFO nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Took 0.04 seconds to destroy the instance on the hypervisor. [ 693.072110] env[62923]: DEBUG oslo.service.loopingcall [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 693.072470] env[62923]: DEBUG nova.compute.manager [-] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 693.072654] env[62923]: DEBUG nova.network.neutron [-] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.095144] env[62923]: DEBUG nova.network.neutron [-] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Instance cache missing network info.
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.193438] env[62923]: DEBUG nova.network.neutron [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.332309] env[62923]: DEBUG nova.network.neutron [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.510201] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc14808-19ef-4a69-8dea-82021e0c98d7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.521676] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eee56b2-7b0e-47c8-84c1-31a743f88b57 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.557364] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9029827-24cb-4875-95ea-7170ccebd91f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.562170] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b1de73-57ea-4e42-82f0-210e9796282a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.576151] env[62923]: DEBUG nova.compute.provider_tree [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.599977] env[62923]: DEBUG nova.network.neutron [-] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.839029] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Releasing lock "refresh_cache-7559e4b7-1cfe-438e-8a14-a964c1a76d52" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.839029] env[62923]: DEBUG nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 693.839029] env[62923]: DEBUG nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 693.839029] env[62923]: DEBUG nova.network.neutron [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.853423] env[62923]: DEBUG nova.network.neutron [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.079885] env[62923]: DEBUG nova.scheduler.client.report [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 694.100867] env[62923]: INFO nova.compute.manager [-] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Took 1.03 seconds to deallocate network for instance. [ 694.103097] env[62923]: DEBUG nova.compute.claims [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 694.103279] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.173900] env[62923]: ERROR nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 30f00255-95f5-4dea-9292-b132c6e7cc70, please check neutron logs for more information. 
[ 694.173900] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 694.173900] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 694.173900] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 694.173900] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 694.173900] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 694.173900] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 694.173900] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 694.173900] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 694.173900] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 694.173900] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 694.173900] env[62923]: ERROR nova.compute.manager raise self.value [ 694.173900] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 694.173900] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 694.173900] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 694.173900] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 694.175022] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 694.175022] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 694.175022] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 30f00255-95f5-4dea-9292-b132c6e7cc70, please check neutron logs for more information. 
[ 694.175022] env[62923]: ERROR nova.compute.manager [ 694.175022] env[62923]: Traceback (most recent call last): [ 694.175022] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 694.175022] env[62923]: listener.cb(fileno) [ 694.175022] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 694.175022] env[62923]: result = function(*args, **kwargs) [ 694.175022] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 694.175022] env[62923]: return func(*args, **kwargs) [ 694.175022] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 694.175022] env[62923]: raise e [ 694.175022] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 694.175022] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 694.175022] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 694.175022] env[62923]: created_port_ids = self._update_ports_for_instance( [ 694.175022] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 694.175022] env[62923]: with excutils.save_and_reraise_exception(): [ 694.175022] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 694.175022] env[62923]: self.force_reraise() [ 694.175022] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 694.175022] env[62923]: raise self.value [ 694.175022] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 694.175022] env[62923]: updated_port = self._update_port( [ 694.175022] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 694.175022] env[62923]: _ensure_no_port_binding_failure(port) [ 694.175022] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 694.175022] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 694.175772] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 30f00255-95f5-4dea-9292-b132c6e7cc70, please check neutron logs for more information. [ 694.175772] env[62923]: Removing descriptor: 17 [ 694.175772] env[62923]: ERROR nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 30f00255-95f5-4dea-9292-b132c6e7cc70, please check neutron logs for more information. 
[ 694.175772] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Traceback (most recent call last): [ 694.175772] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 694.175772] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] yield resources [ 694.175772] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 694.175772] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] self.driver.spawn(context, instance, image_meta, [ 694.175772] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 694.175772] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] self._vmops.spawn(context, instance, image_meta, injected_files, [ 694.175772] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 694.175772] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] vm_ref = self.build_virtual_machine(instance, [ 694.176160] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 694.176160] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] vif_infos = vmwarevif.get_vif_info(self._session, [ 694.176160] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 694.176160] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] for vif in network_info: [ 694.176160] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 694.176160] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] return self._sync_wrapper(fn, *args, **kwargs) [ 694.176160] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 694.176160] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] self.wait() [ 694.176160] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 694.176160] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] self[:] = self._gt.wait() [ 694.176160] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 694.176160] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] return self._exit_event.wait() [ 694.176160] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 694.176535] env[62923]: ERROR 
nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] result = hub.switch() [ 694.176535] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 694.176535] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] return self.greenlet.switch() [ 694.176535] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 694.176535] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] result = function(*args, **kwargs) [ 694.176535] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 694.176535] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] return func(*args, **kwargs) [ 694.176535] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 694.176535] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] raise e [ 694.176535] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 694.176535] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] nwinfo = self.network_api.allocate_for_instance( [ 694.176535] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 694.176535] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] created_port_ids = self._update_ports_for_instance( [ 694.176897] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 694.176897] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] with excutils.save_and_reraise_exception(): [ 694.176897] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 694.176897] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] self.force_reraise() [ 694.176897] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 694.176897] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] raise self.value [ 694.176897] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 694.176897] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] updated_port = self._update_port( [ 694.176897] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 694.176897] 
env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] _ensure_no_port_binding_failure(port) [ 694.176897] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 694.176897] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] raise exception.PortBindingFailed(port_id=port['id']) [ 694.177221] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] nova.exception.PortBindingFailed: Binding failed for port 30f00255-95f5-4dea-9292-b132c6e7cc70, please check neutron logs for more information. [ 694.177221] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] [ 694.177221] env[62923]: INFO nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Terminating instance [ 694.177337] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquiring lock "refresh_cache-c11d0dcc-e5aa-4d7c-bba5-2853622dde44" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.177488] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquired lock "refresh_cache-c11d0dcc-e5aa-4d7c-bba5-2853622dde44" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.177645] env[62923]: DEBUG nova.network.neutron [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 694.356205] env[62923]: DEBUG nova.network.neutron [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.585043] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.941s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.586027] env[62923]: ERROR nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3c73ae97-c9b7-4453-be1f-a7271494dd19, please check neutron logs for more information. 
[ 694.586027] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Traceback (most recent call last): [ 694.586027] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 694.586027] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] self.driver.spawn(context, instance, image_meta, [ 694.586027] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 694.586027] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 694.586027] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 694.586027] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] vm_ref = self.build_virtual_machine(instance, [ 694.586027] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 694.586027] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] vif_infos = vmwarevif.get_vif_info(self._session, [ 694.586027] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 694.586817] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] for vif in network_info: [ 694.586817] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 694.586817] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] return self._sync_wrapper(fn, *args, **kwargs) [ 694.586817] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 694.586817] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] self.wait() [ 694.586817] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 694.586817] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] self[:] = self._gt.wait() [ 694.586817] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 694.586817] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] return self._exit_event.wait() [ 694.586817] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 694.586817] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] result = hub.switch() [ 694.586817] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
694.586817] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] return self.greenlet.switch() [ 694.588303] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 694.588303] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] result = function(*args, **kwargs) [ 694.588303] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 694.588303] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] return func(*args, **kwargs) [ 694.588303] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 694.588303] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] raise e [ 694.588303] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 694.588303] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] nwinfo = self.network_api.allocate_for_instance( [ 694.588303] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 694.588303] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] created_port_ids = self._update_ports_for_instance( [ 694.588303] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 694.588303] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] with excutils.save_and_reraise_exception(): [ 694.588303] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 694.588664] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] self.force_reraise() [ 694.588664] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 694.588664] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] raise self.value [ 694.588664] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 694.588664] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] updated_port = self._update_port( [ 694.588664] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 694.588664] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] _ensure_no_port_binding_failure(port) [ 694.588664] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 694.588664] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] raise exception.PortBindingFailed(port_id=port['id']) [ 694.588664] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] nova.exception.PortBindingFailed: Binding failed for port 3c73ae97-c9b7-4453-be1f-a7271494dd19, please check neutron logs for more information. [ 694.588664] env[62923]: ERROR nova.compute.manager [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] [ 694.588936] env[62923]: DEBUG nova.compute.utils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Binding failed for port 3c73ae97-c9b7-4453-be1f-a7271494dd19, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 694.590683] env[62923]: DEBUG nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Build of instance 9654e9d5-a809-4875-99bb-fd99d7a7fbd6 was re-scheduled: Binding failed for port 3c73ae97-c9b7-4453-be1f-a7271494dd19, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 694.591060] env[62923]: DEBUG nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 694.591300] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquiring lock "refresh_cache-9654e9d5-a809-4875-99bb-fd99d7a7fbd6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.591444] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Acquired lock "refresh_cache-9654e9d5-a809-4875-99bb-fd99d7a7fbd6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.592516] env[62923]: DEBUG nova.network.neutron [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 694.593212] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.151s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.723162] env[62923]: DEBUG nova.network.neutron [None 
req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.818351] env[62923]: DEBUG nova.compute.manager [req-efcc995b-7e4e-4fca-8164-aa0b78e1aa3e req-8a29c072-2998-46d1-87de-04a4f2499ea5 service nova] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Received event network-changed-30f00255-95f5-4dea-9292-b132c6e7cc70 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 694.818351] env[62923]: DEBUG nova.compute.manager [req-efcc995b-7e4e-4fca-8164-aa0b78e1aa3e req-8a29c072-2998-46d1-87de-04a4f2499ea5 service nova] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Refreshing instance network info cache due to event network-changed-30f00255-95f5-4dea-9292-b132c6e7cc70. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 694.818540] env[62923]: DEBUG oslo_concurrency.lockutils [req-efcc995b-7e4e-4fca-8164-aa0b78e1aa3e req-8a29c072-2998-46d1-87de-04a4f2499ea5 service nova] Acquiring lock "refresh_cache-c11d0dcc-e5aa-4d7c-bba5-2853622dde44" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.858630] env[62923]: INFO nova.compute.manager [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 7559e4b7-1cfe-438e-8a14-a964c1a76d52] Took 1.02 seconds to deallocate network for instance. [ 694.868276] env[62923]: DEBUG nova.network.neutron [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.114479] env[62923]: DEBUG nova.network.neutron [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.238238] env[62923]: DEBUG nova.network.neutron [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.372518] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Releasing lock "refresh_cache-c11d0dcc-e5aa-4d7c-bba5-2853622dde44" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.372915] env[62923]: DEBUG nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 695.373116] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 695.373951] env[62923]: DEBUG oslo_concurrency.lockutils [req-efcc995b-7e4e-4fca-8164-aa0b78e1aa3e req-8a29c072-2998-46d1-87de-04a4f2499ea5 service nova] Acquired lock "refresh_cache-c11d0dcc-e5aa-4d7c-bba5-2853622dde44" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.374168] env[62923]: DEBUG nova.network.neutron [req-efcc995b-7e4e-4fca-8164-aa0b78e1aa3e req-8a29c072-2998-46d1-87de-04a4f2499ea5 service nova] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Refreshing network info cache for port 30f00255-95f5-4dea-9292-b132c6e7cc70 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 695.375064] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45a46206-e4ca-4fb8-b1af-11e880d1313d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.387570] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76e965e-ae5c-4213-90a3-9768cdac3fa1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.412356] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c11d0dcc-e5aa-4d7c-bba5-2853622dde44 could not be found. [ 695.412584] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 695.412759] env[62923]: INFO nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Took 0.04 seconds to destroy the instance on the hypervisor. [ 695.412992] env[62923]: DEBUG oslo.service.loopingcall [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 695.415612] env[62923]: DEBUG nova.compute.manager [-] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 695.415612] env[62923]: DEBUG nova.network.neutron [-] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 695.447501] env[62923]: DEBUG nova.network.neutron [-] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.485163] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5987c650-dd77-442a-85ce-ce1332fb8c9f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.492710] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86225090-163d-4b4e-93da-63fc3f3ca622 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.527524] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d25dd36-f59c-41bf-89ff-5af99cea5248 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.536317] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9eb3e0-c146-4fd0-bf45-2f64d6536d2c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.550994] env[62923]: DEBUG nova.compute.provider_tree [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.740168] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Releasing lock "refresh_cache-9654e9d5-a809-4875-99bb-fd99d7a7fbd6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.740440] env[62923]: DEBUG nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 695.740666] env[62923]: DEBUG nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 695.740792] env[62923]: DEBUG nova.network.neutron [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 695.895632] env[62923]: INFO nova.scheduler.client.report [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Deleted allocations for instance 7559e4b7-1cfe-438e-8a14-a964c1a76d52 [ 695.925506] env[62923]: DEBUG nova.network.neutron [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.960232] env[62923]: DEBUG nova.network.neutron [req-efcc995b-7e4e-4fca-8164-aa0b78e1aa3e req-8a29c072-2998-46d1-87de-04a4f2499ea5 service nova] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.055442] env[62923]: DEBUG nova.scheduler.client.report [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 696.149723] env[62923]: DEBUG nova.network.neutron [req-efcc995b-7e4e-4fca-8164-aa0b78e1aa3e req-8a29c072-2998-46d1-87de-04a4f2499ea5 service nova] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.403174] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad8de4c4-a5e3-48cb-966b-9cd5c915be38 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Lock "7559e4b7-1cfe-438e-8a14-a964c1a76d52" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 113.680s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.429034] env[62923]: DEBUG nova.network.neutron [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 
tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.561772] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.968s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.563298] env[62923]: ERROR nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 91c4c07c-bd13-4a45-8864-147fe772f374, please check neutron logs for more information. [ 696.563298] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Traceback (most recent call last): [ 696.563298] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 696.563298] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] self.driver.spawn(context, instance, image_meta, [ 696.563298] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 696.563298] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 696.563298] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 696.563298] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] vm_ref = self.build_virtual_machine(instance, [ 696.563298] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 696.563298] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] vif_infos = vmwarevif.get_vif_info(self._session, [ 696.563298] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 696.563637] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] for vif in network_info: [ 696.563637] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 696.563637] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] return self._sync_wrapper(fn, *args, **kwargs) [ 696.563637] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 696.563637] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] self.wait() [ 
696.563637] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 696.563637] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] self[:] = self._gt.wait() [ 696.563637] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 696.563637] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] return self._exit_event.wait() [ 696.563637] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 696.563637] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] result = hub.switch() [ 696.563637] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 696.563637] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] return self.greenlet.switch() [ 696.564026] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 696.564026] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] result = function(*args, **kwargs) [ 696.564026] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 696.564026] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] return func(*args, **kwargs) [ 696.564026] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 696.564026] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] raise e [ 696.564026] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 696.564026] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] nwinfo = self.network_api.allocate_for_instance( [ 696.564026] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 696.564026] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] created_port_ids = self._update_ports_for_instance( [ 696.564026] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 696.564026] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] with excutils.save_and_reraise_exception(): [ 696.564026] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 696.564347] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] 
self.force_reraise() [ 696.564347] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 696.564347] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] raise self.value [ 696.564347] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 696.564347] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] updated_port = self._update_port( [ 696.564347] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 696.564347] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] _ensure_no_port_binding_failure(port) [ 696.564347] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 696.564347] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] raise exception.PortBindingFailed(port_id=port['id']) [ 696.564347] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] nova.exception.PortBindingFailed: Binding failed for port 91c4c07c-bd13-4a45-8864-147fe772f374, please check neutron logs for more information. [ 696.564347] env[62923]: ERROR nova.compute.manager [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] [ 696.565377] env[62923]: DEBUG nova.compute.utils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Binding failed for port 91c4c07c-bd13-4a45-8864-147fe772f374, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 696.565377] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.933s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.566047] env[62923]: INFO nova.compute.claims [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 696.572369] env[62923]: DEBUG nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Build of instance 91043784-2e4a-4fa4-87de-1c45971e64c5 was re-scheduled: Binding failed for port 91c4c07c-bd13-4a45-8864-147fe772f374, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 696.572369] env[62923]: DEBUG nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 696.572369] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquiring lock "refresh_cache-91043784-2e4a-4fa4-87de-1c45971e64c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 696.572369] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Acquired lock "refresh_cache-91043784-2e4a-4fa4-87de-1c45971e64c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.572573] env[62923]: DEBUG nova.network.neutron [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 696.654083] env[62923]: DEBUG oslo_concurrency.lockutils [req-efcc995b-7e4e-4fca-8164-aa0b78e1aa3e req-8a29c072-2998-46d1-87de-04a4f2499ea5 service nova] Releasing lock "refresh_cache-c11d0dcc-e5aa-4d7c-bba5-2853622dde44" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.654083] env[62923]: DEBUG nova.compute.manager [req-efcc995b-7e4e-4fca-8164-aa0b78e1aa3e req-8a29c072-2998-46d1-87de-04a4f2499ea5 service nova] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Received event network-vif-deleted-30f00255-95f5-4dea-9292-b132c6e7cc70 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 696.702493] env[62923]: DEBUG nova.network.neutron [-] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.906202] env[62923]: DEBUG nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 696.932891] env[62923]: INFO nova.compute.manager [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] [instance: 9654e9d5-a809-4875-99bb-fd99d7a7fbd6] Took 1.19 seconds to deallocate network for instance. [ 697.092663] env[62923]: DEBUG nova.network.neutron [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.165827] env[62923]: DEBUG nova.network.neutron [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.204574] env[62923]: INFO nova.compute.manager [-] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Took 1.79 seconds to deallocate network for instance. [ 697.208878] env[62923]: DEBUG nova.compute.claims [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 697.209440] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.430558] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.668776] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Releasing lock "refresh_cache-91043784-2e4a-4fa4-87de-1c45971e64c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.668776] env[62923]: DEBUG nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 697.668776] env[62923]: DEBUG nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 697.669040] env[62923]: DEBUG nova.network.neutron [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 697.692115] env[62923]: DEBUG nova.network.neutron [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.944502] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c000db2c-1cf9-4832-88c3-436647bd52bd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.952873] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a012e90d-cfc6-464a-b750-9316ed41c137 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.958450] env[62923]: INFO nova.scheduler.client.report [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Deleted allocations for instance 9654e9d5-a809-4875-99bb-fd99d7a7fbd6 [ 697.989652] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739b404f-573c-4cf4-88df-659e04f28ab4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.997530] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a8dca0-3b00-4337-933a-7ba41ae0d925 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.012268] env[62923]: DEBUG nova.compute.provider_tree [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.127343] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Acquiring lock "8a369d56-8f85-4d04-ac6b-bf2eced7098f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.127567] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Lock "8a369d56-8f85-4d04-ac6b-bf2eced7098f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.195318] env[62923]: DEBUG nova.network.neutron [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.489184] env[62923]: DEBUG oslo_concurrency.lockutils [None req-84dafa4d-ac23-4d6e-9067-be3586dc6302 tempest-ServerRescueNegativeTestJSON-896304397 tempest-ServerRescueNegativeTestJSON-896304397-project-member] Lock "9654e9d5-a809-4875-99bb-fd99d7a7fbd6" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.649s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.516950] env[62923]: DEBUG nova.scheduler.client.report [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 698.699517] env[62923]: INFO nova.compute.manager [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] [instance: 91043784-2e4a-4fa4-87de-1c45971e64c5] Took 1.03 seconds to deallocate network for instance. [ 698.992552] env[62923]: DEBUG nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 699.023456] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.024178] env[62923]: DEBUG nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 699.028703] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.443s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.029841] env[62923]: INFO nova.compute.claims [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 699.513223] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.529267] env[62923]: DEBUG nova.compute.utils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 699.531131] env[62923]: DEBUG nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 699.531375] env[62923]: DEBUG nova.network.neutron [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 699.612755] env[62923]: DEBUG nova.policy [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22d443c30c7f4cc39435b80259652537', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e339423ef410434b84559c961226923f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 699.733903] env[62923]: INFO nova.scheduler.client.report [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Deleted allocations for instance 91043784-2e4a-4fa4-87de-1c45971e64c5 [ 700.032067] env[62923]: DEBUG nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 700.202523] env[62923]: DEBUG nova.network.neutron [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Successfully created port: 90939492-8b35-4150-bcd7-213d46d845ef {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 700.247749] env[62923]: DEBUG oslo_concurrency.lockutils [None req-41e4def3-9406-4a26-9981-9f21deb9ff4b tempest-MigrationsAdminTest-1309469553 tempest-MigrationsAdminTest-1309469553-project-member] Lock "91043784-2e4a-4fa4-87de-1c45971e64c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 112.036s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.383193] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0831a52b-6dae-4bfe-9aaf-25e8e4f0a742 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.391668] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385da607-d2d0-48e2-beab-caa17efb9f8c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.426908] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786313b0-b9ad-4902-8f2b-2e8da4970931 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.434594] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8280c2-6e88-481e-9899-0c6d78511792 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.450963] env[62923]: DEBUG nova.compute.provider_tree [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.754829] env[62923]: DEBUG nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 700.956020] env[62923]: DEBUG nova.scheduler.client.report [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 701.042905] env[62923]: DEBUG nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 701.078672] env[62923]: DEBUG nova.virt.hardware [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 701.078921] env[62923]: DEBUG nova.virt.hardware [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 701.079086] env[62923]: DEBUG nova.virt.hardware [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 701.079518] env[62923]: DEBUG nova.virt.hardware [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 701.080320] env[62923]: DEBUG nova.virt.hardware [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Image pref 0:0:0 {{(pid=62923) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 701.081130] env[62923]: DEBUG nova.virt.hardware [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 701.081130] env[62923]: DEBUG nova.virt.hardware [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 701.081130] env[62923]: DEBUG nova.virt.hardware [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 701.081130] env[62923]: DEBUG nova.virt.hardware [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 701.081315] env[62923]: DEBUG nova.virt.hardware [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 701.081398] env[62923]: DEBUG nova.virt.hardware [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 701.082444] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271ccf58-9de2-4433-aaf5-347b8bd71fca {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.091689] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41dda635-86a0-4e8c-864d-f58cef14ad77 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.284173] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.462488] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.463292] env[62923]: DEBUG nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 701.465511] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.947s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.466908] env[62923]: INFO nova.compute.claims [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 701.613476] env[62923]: DEBUG nova.compute.manager [req-cc4d5e6c-cc65-419a-ae18-61fa0f535932 req-9aecb951-6736-48d5-92f8-5e27c0b57421 service nova] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Received event network-changed-90939492-8b35-4150-bcd7-213d46d845ef {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 701.613666] env[62923]: DEBUG nova.compute.manager [req-cc4d5e6c-cc65-419a-ae18-61fa0f535932 req-9aecb951-6736-48d5-92f8-5e27c0b57421 service nova] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Refreshing instance network info cache due to event network-changed-90939492-8b35-4150-bcd7-213d46d845ef. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 701.613912] env[62923]: DEBUG oslo_concurrency.lockutils [req-cc4d5e6c-cc65-419a-ae18-61fa0f535932 req-9aecb951-6736-48d5-92f8-5e27c0b57421 service nova] Acquiring lock "refresh_cache-81c87881-bf63-4622-a0cb-6e38680a8f14" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.614070] env[62923]: DEBUG oslo_concurrency.lockutils [req-cc4d5e6c-cc65-419a-ae18-61fa0f535932 req-9aecb951-6736-48d5-92f8-5e27c0b57421 service nova] Acquired lock "refresh_cache-81c87881-bf63-4622-a0cb-6e38680a8f14" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.614229] env[62923]: DEBUG nova.network.neutron [req-cc4d5e6c-cc65-419a-ae18-61fa0f535932 req-9aecb951-6736-48d5-92f8-5e27c0b57421 service nova] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Refreshing network info cache for port 90939492-8b35-4150-bcd7-213d46d845ef {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 701.755541] env[62923]: ERROR nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 90939492-8b35-4150-bcd7-213d46d845ef, please check neutron logs for more information. 
[ 701.755541] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 701.755541] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 701.755541] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 701.755541] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 701.755541] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 701.755541] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 701.755541] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 701.755541] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.755541] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 701.755541] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.755541] env[62923]: ERROR nova.compute.manager raise self.value [ 701.755541] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 701.755541] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 701.755541] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 701.755541] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 701.756230] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 701.756230] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 701.756230] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 90939492-8b35-4150-bcd7-213d46d845ef, please check neutron logs for more information. 
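Several frames in the traceback above (save_and_reraise_exception, __exit__, force_reraise, raise self.value) come from oslo.utils rather than Nova. That context manager exists so cleanup can run after a failure without losing the original exception. A short sketch of the pattern, assuming only the documented oslo.utils API; the bind() helper is a hypothetical stand-in for Nova's _update_port():

from oslo_utils import excutils  # requires the oslo.utils library

def bind(port):
    # Hypothetical stand-in that always fails, like the port update above.
    raise RuntimeError(f"binding failed for {port}")

def update_ports_for_instance(ports):
    created = []
    for port in ports:
        try:
            created.append(bind(port))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs with the exception saved; when this block
                # exits, the original error is re-raised via force_reraise(),
                # which is exactly the pair of frames in the traceback above.
                created.clear()
    return created

The net effect is the traceback you see: the cleanup happens, yet the caller still receives the first PortBindingFailed instead of a secondary error from the cleanup path.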
[ 701.756230] env[62923]: ERROR nova.compute.manager [ 701.756230] env[62923]: Traceback (most recent call last): [ 701.756230] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 701.756230] env[62923]: listener.cb(fileno) [ 701.756230] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 701.756230] env[62923]: result = function(*args, **kwargs) [ 701.756230] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 701.756230] env[62923]: return func(*args, **kwargs) [ 701.756230] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 701.756230] env[62923]: raise e [ 701.756230] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 701.756230] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 701.756230] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 701.756230] env[62923]: created_port_ids = self._update_ports_for_instance( [ 701.756230] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 701.756230] env[62923]: with excutils.save_and_reraise_exception(): [ 701.756230] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.756230] env[62923]: self.force_reraise() [ 701.756230] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.756230] env[62923]: raise self.value [ 701.756230] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 701.756230] env[62923]: updated_port = self._update_port( [ 701.756230] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 701.756230] env[62923]: _ensure_no_port_binding_failure(port) [ 701.756230] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 701.756230] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 701.757012] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 90939492-8b35-4150-bcd7-213d46d845ef, please check neutron logs for more information. [ 701.757012] env[62923]: Removing descriptor: 17 [ 701.757012] env[62923]: ERROR nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 90939492-8b35-4150-bcd7-213d46d845ef, please check neutron logs for more information. 
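The second copy of the traceback shows where the failure actually surfaces: allocation runs in an eventlet greenthread (the hubs/poll.py and greenthread.py frames), and the build path only joins it when the driver first iterates network_info, which is why the error is raised under vmwareapi/vif.py rather than at allocation time. A reduced illustration of that deferred-failure behavior, using only the public eventlet API (the allocate function is a stand-in, not Nova code):

import eventlet  # requires the eventlet library

def allocate_network(instance_id):
    # Stand-in for network_api.allocate_for_instance(); in the log this is
    # where PortBindingFailed is raised inside the background greenthread.
    raise RuntimeError(f"binding failed while allocating for {instance_id}")

# Nova starts allocation early so block-device setup can proceed in parallel.
gt = eventlet.spawn(allocate_network, '81c87881-bf63-4622-a0cb-6e38680a8f14')

try:
    gt.wait()  # the driver's first use of network_info effectively lands here
except RuntimeError as exc:
    # The exception from the greenthread is re-raised at the wait() call,
    # matching the _sync_wrapper()/wait() frames in the traceback above.
    print(exc)

"Removing descriptor: 17" immediately after is the hub unregistering the greenthread's file descriptor once the failed task is reaped.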
[ 701.757012] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Traceback (most recent call last): [ 701.757012] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 701.757012] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] yield resources [ 701.757012] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 701.757012] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] self.driver.spawn(context, instance, image_meta, [ 701.757012] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 701.757012] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 701.757012] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 701.757012] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] vm_ref = self.build_virtual_machine(instance, [ 701.757444] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 701.757444] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] vif_infos = vmwarevif.get_vif_info(self._session, [ 701.757444] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 701.757444] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] for vif in network_info: [ 701.757444] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 701.757444] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] return self._sync_wrapper(fn, *args, **kwargs) [ 701.757444] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 701.757444] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] self.wait() [ 701.757444] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 701.757444] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] self[:] = self._gt.wait() [ 701.757444] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 701.757444] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] return self._exit_event.wait() [ 701.757444] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 701.757822] env[62923]: ERROR 
nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] result = hub.switch() [ 701.757822] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 701.757822] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] return self.greenlet.switch() [ 701.757822] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 701.757822] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] result = function(*args, **kwargs) [ 701.757822] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 701.757822] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] return func(*args, **kwargs) [ 701.757822] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 701.757822] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] raise e [ 701.757822] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 701.757822] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] nwinfo = self.network_api.allocate_for_instance( [ 701.757822] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 701.757822] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] created_port_ids = self._update_ports_for_instance( [ 701.758157] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 701.758157] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] with excutils.save_and_reraise_exception(): [ 701.758157] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.758157] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] self.force_reraise() [ 701.758157] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.758157] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] raise self.value [ 701.758157] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 701.758157] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] updated_port = self._update_port( [ 701.758157] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 701.758157] 
env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] _ensure_no_port_binding_failure(port) [ 701.758157] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 701.758157] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] raise exception.PortBindingFailed(port_id=port['id']) [ 701.758484] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] nova.exception.PortBindingFailed: Binding failed for port 90939492-8b35-4150-bcd7-213d46d845ef, please check neutron logs for more information. [ 701.758484] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] [ 701.758484] env[62923]: INFO nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Terminating instance [ 701.759128] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Acquiring lock "refresh_cache-81c87881-bf63-4622-a0cb-6e38680a8f14" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.971155] env[62923]: DEBUG nova.compute.utils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 701.974419] env[62923]: DEBUG nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 701.974591] env[62923]: DEBUG nova.network.neutron [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 702.016154] env[62923]: DEBUG nova.policy [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bc7339e5cc845668864bcdd8d09e610', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd013513ad708456f9a827c8d4974beec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 702.142746] env[62923]: DEBUG nova.network.neutron [req-cc4d5e6c-cc65-419a-ae18-61fa0f535932 req-9aecb951-6736-48d5-92f8-5e27c0b57421 service nova] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 702.328265] env[62923]: DEBUG nova.network.neutron [req-cc4d5e6c-cc65-419a-ae18-61fa0f535932 req-9aecb951-6736-48d5-92f8-5e27c0b57421 service nova] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.480018] env[62923]: DEBUG nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 702.518922] env[62923]: DEBUG nova.network.neutron [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Successfully created port: 43314fbb-c56a-40d4-be53-83e2ff602344 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 702.834091] env[62923]: DEBUG oslo_concurrency.lockutils [req-cc4d5e6c-cc65-419a-ae18-61fa0f535932 req-9aecb951-6736-48d5-92f8-5e27c0b57421 service nova] Releasing lock "refresh_cache-81c87881-bf63-4622-a0cb-6e38680a8f14" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.835146] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Acquired lock "refresh_cache-81c87881-bf63-4622-a0cb-6e38680a8f14" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.835146] env[62923]: DEBUG nova.network.neutron [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 702.868087] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7deb296-1cd4-4166-a686-e15fc9ebdc49 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.876243] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27fc96b8-e3e7-4238-892b-9c00afdb5bb2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.908038] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88bfb53-7c45-465a-82f2-d13a6c7dea8b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.917159] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56baff0-bf89-4c13-9e2e-3d4085874d76 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.931810] env[62923]: DEBUG nova.compute.provider_tree [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 
tempest-ListServersNegativeTestJSON-762248896-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.363502] env[62923]: DEBUG nova.network.neutron [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 703.435493] env[62923]: DEBUG nova.scheduler.client.report [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 703.492284] env[62923]: DEBUG nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 703.517440] env[62923]: DEBUG nova.virt.hardware [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 703.517673] env[62923]: DEBUG nova.virt.hardware [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 703.517825] env[62923]: DEBUG nova.virt.hardware [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 703.518009] env[62923]: DEBUG nova.virt.hardware [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 
tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 703.518161] env[62923]: DEBUG nova.virt.hardware [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 703.518304] env[62923]: DEBUG nova.virt.hardware [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 703.518500] env[62923]: DEBUG nova.virt.hardware [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 703.518651] env[62923]: DEBUG nova.virt.hardware [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 703.518809] env[62923]: DEBUG nova.virt.hardware [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 703.518964] env[62923]: DEBUG nova.virt.hardware [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 703.519144] env[62923]: DEBUG nova.virt.hardware [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 703.520311] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01648eb-eff8-4401-93de-9d4f4fad4007 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.528286] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d02a141-f551-4b08-9c0f-434fcf57efd4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.549228] env[62923]: DEBUG nova.network.neutron [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 703.686392] env[62923]: DEBUG nova.compute.manager [req-a0c21b15-f3b9-4cd3-bc10-e7f9e5f9442e req-4257297f-100b-4b87-aff0-2103b53cc5ba service nova] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Received event network-vif-deleted-90939492-8b35-4150-bcd7-213d46d845ef {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 703.941207] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.941740] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 703.946230] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.503s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.948019] env[62923]: INFO nova.compute.claims [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 704.052244] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Releasing lock "refresh_cache-81c87881-bf63-4622-a0cb-6e38680a8f14" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.052673] env[62923]: DEBUG nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 704.052858] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 704.053631] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b7eb797-78ad-4296-a180-aa81cbb8803b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.062416] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b7e834-22cb-459c-b547-9db16ea2a5b8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.082983] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 81c87881-bf63-4622-a0cb-6e38680a8f14 could not be found. [ 704.083204] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 704.083482] env[62923]: INFO nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Took 0.03 seconds to destroy the instance on the hypervisor. [ 704.083611] env[62923]: DEBUG oslo.service.loopingcall [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 704.083884] env[62923]: DEBUG nova.compute.manager [-] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 704.083982] env[62923]: DEBUG nova.network.neutron [-] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 704.106503] env[62923]: DEBUG nova.network.neutron [-] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.365704] env[62923]: ERROR nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 43314fbb-c56a-40d4-be53-83e2ff602344, please check neutron logs for more information. [ 704.365704] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 704.365704] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.365704] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 704.365704] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 704.365704] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 704.365704] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 704.365704] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 704.365704] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.365704] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 704.365704] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.365704] env[62923]: ERROR nova.compute.manager raise self.value [ 704.365704] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 704.365704] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 704.365704] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.365704] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 704.366454] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.366454] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 704.366454] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 43314fbb-c56a-40d4-be53-83e2ff602344, please check neutron logs for more information. 
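Throughout this section the resource tracker's claims queue behind a single "compute_resources" lock: note waited 14.443s at 699.028703, 14.947s at 701.465511, and 15.503s at 703.946230, because every instance_claim and abort_instance_claim on the host serializes on it. The Acquiring/acquired/released records are emitted by oslo.concurrency itself; a minimal sketch of the same pattern, where the decorator is the real API and the body is only a placeholder:

import time
from oslo_concurrency import lockutils  # requires the oslo.concurrency library

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Everything here runs with the process-wide semaphore held; concurrent
    # builds sit in "Acquiring lock" until the current holder releases,
    # which is where the multi-second "waited" figures above come from.
    time.sleep(0.1)  # placeholder for the actual resource accounting
    return instance_uuid

With dozens of tempest builds landing on one compute process, even a ~2.5s hold time per claim (held 2.459s, 2.434s, 2.476s above) compounds into the double-digit waits.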
[ 704.366454] env[62923]: ERROR nova.compute.manager [ 704.366454] env[62923]: Traceback (most recent call last): [ 704.366454] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 704.366454] env[62923]: listener.cb(fileno) [ 704.366454] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 704.366454] env[62923]: result = function(*args, **kwargs) [ 704.366454] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 704.366454] env[62923]: return func(*args, **kwargs) [ 704.366454] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 704.366454] env[62923]: raise e [ 704.366454] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.366454] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 704.366454] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 704.366454] env[62923]: created_port_ids = self._update_ports_for_instance( [ 704.366454] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 704.366454] env[62923]: with excutils.save_and_reraise_exception(): [ 704.366454] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.366454] env[62923]: self.force_reraise() [ 704.366454] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.366454] env[62923]: raise self.value [ 704.366454] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 704.366454] env[62923]: updated_port = self._update_port( [ 704.366454] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.366454] env[62923]: _ensure_no_port_binding_failure(port) [ 704.366454] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.366454] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 704.367283] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 43314fbb-c56a-40d4-be53-83e2ff602344, please check neutron logs for more information. [ 704.367283] env[62923]: Removing descriptor: 17 [ 704.367283] env[62923]: ERROR nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 43314fbb-c56a-40d4-be53-83e2ff602344, please check neutron logs for more information. 
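Both renderings of the traceback above funnel through oslo.utils' save_and_reraise_exception() (the excutils.py:227 __exit__ and excutils.py:200 force_reraise frames); the per-instance copy that follows repeats the same frames. The pattern exists so cleanup can run while an exception is in flight and the original exception is still re-raised afterwards. A minimal sketch against the real oslo_utils API; the function body and its stand-in failure are illustrative only:

    from oslo_utils import excutils

    def update_ports():
        try:
            raise RuntimeError('port update failed')  # stand-in failure
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here with the exception saved; on exit,
                # __exit__ calls force_reraise(), i.e. "raise self.value",
                # which is exactly the pair of frames in the traceback.
                print('rolling back created ports')

    # Calling update_ports() prints the rollback message and then
    # re-raises the original RuntimeError.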
[ 704.367283] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Traceback (most recent call last): [ 704.367283] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 704.367283] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] yield resources [ 704.367283] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 704.367283] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] self.driver.spawn(context, instance, image_meta, [ 704.367283] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 704.367283] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.367283] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 704.367283] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] vm_ref = self.build_virtual_machine(instance, [ 704.367591] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 704.367591] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] vif_infos = vmwarevif.get_vif_info(self._session, [ 704.367591] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 704.367591] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] for vif in network_info: [ 704.367591] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 704.367591] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] return self._sync_wrapper(fn, *args, **kwargs) [ 704.367591] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 704.367591] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] self.wait() [ 704.367591] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 704.367591] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] self[:] = self._gt.wait() [ 704.367591] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 704.367591] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] return self._exit_event.wait() [ 704.367591] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 704.367921] env[62923]: ERROR 
nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] result = hub.switch() [ 704.367921] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 704.367921] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] return self.greenlet.switch() [ 704.367921] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 704.367921] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] result = function(*args, **kwargs) [ 704.367921] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 704.367921] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] return func(*args, **kwargs) [ 704.367921] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 704.367921] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] raise e [ 704.367921] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.367921] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] nwinfo = self.network_api.allocate_for_instance( [ 704.367921] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 704.367921] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] created_port_ids = self._update_ports_for_instance( [ 704.368261] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 704.368261] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] with excutils.save_and_reraise_exception(): [ 704.368261] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.368261] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] self.force_reraise() [ 704.368261] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.368261] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] raise self.value [ 704.368261] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 704.368261] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] updated_port = self._update_port( [ 704.368261] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.368261] 
env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] _ensure_no_port_binding_failure(port) [ 704.368261] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.368261] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] raise exception.PortBindingFailed(port_id=port['id']) [ 704.368643] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] nova.exception.PortBindingFailed: Binding failed for port 43314fbb-c56a-40d4-be53-83e2ff602344, please check neutron logs for more information. [ 704.368643] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] [ 704.368643] env[62923]: INFO nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Terminating instance [ 704.369362] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "refresh_cache-d45fe9ea-8538-47da-b8dd-c67f8863a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.369514] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquired lock "refresh_cache-d45fe9ea-8538-47da-b8dd-c67f8863a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.369670] env[62923]: DEBUG nova.network.neutron [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 704.453654] env[62923]: DEBUG nova.compute.utils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 704.456627] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 704.460019] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 704.532164] env[62923]: DEBUG nova.policy [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4b4e513201d4f72bfcf256f1910c52c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '702e4460131b45049ed40bd29339c46b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 704.608880] env[62923]: DEBUG nova.network.neutron [-] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.915121] env[62923]: DEBUG nova.network.neutron [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.959734] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 705.112174] env[62923]: INFO nova.compute.manager [-] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Took 1.03 seconds to deallocate network for instance. 
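The PortBindingFailed raised at nova/network/neutron.py:294 in the tracebacks above comes from a small guard: Neutron signals a failed binding not with an API error but by setting the returned port's binding:vif_type to 'binding_failed'. A hedged reconstruction of that guard (names follow upstream Nova, but treat this as an illustrative sketch, not the verbatim source):

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # Neutron's sentinel vif_type

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check '
                             'neutron logs for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # After _update_port(), inspect the port dict Neutron handed back.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # e.g. _ensure_no_port_binding_failure(
    #          {'id': '43314fbb-c56a-40d4-be53-83e2ff602344',
    #           'binding:vif_type': 'binding_failed'})  # raises PortBindingFailed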
[ 705.114952] env[62923]: DEBUG nova.compute.claims [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 705.114952] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.157147] env[62923]: DEBUG nova.network.neutron [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.364949] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Successfully created port: 5596e201-05dc-4c8d-8217-4f72482468cf {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 705.416748] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68945382-a07d-4c78-a244-5af725b2db06 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.426801] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7d5a36-9b8f-474c-bc63-410ff7fc58f9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.459685] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c436f1db-5bd5-4eeb-b15a-385f4faaa22f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.471201] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2057bd2-01ec-44f7-bbad-b8c1e011b2dc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.485231] env[62923]: DEBUG nova.compute.provider_tree [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.663605] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Releasing lock "refresh_cache-d45fe9ea-8538-47da-b8dd-c67f8863a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.664141] env[62923]: DEBUG nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 
tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 705.664334] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 705.664641] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70b8a437-c756-4225-9066-a4423586ea9f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.673308] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7b03d3-3d5e-4d3d-80df-5386d2cfb64e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.701022] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d45fe9ea-8538-47da-b8dd-c67f8863a812 could not be found. [ 705.701258] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 705.701434] env[62923]: INFO nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Took 0.04 seconds to destroy the instance on the hypervisor. [ 705.701682] env[62923]: DEBUG oslo.service.loopingcall [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 705.701910] env[62923]: DEBUG nova.compute.manager [-] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 705.702013] env[62923]: DEBUG nova.network.neutron [-] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 705.735204] env[62923]: DEBUG nova.network.neutron [-] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.978516] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 705.988948] env[62923]: DEBUG nova.scheduler.client.report [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 706.004980] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 706.005238] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 706.005429] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.005550] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 706.005687] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 706.005829] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 706.006246] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 706.006454] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 706.006627] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 706.006788] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 706.006993] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 706.008580] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11ee820-a3b6-4162-988c-15d886c9c91b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.024322] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9dd7af-b34a-4d94-9e44-19ae102d1ff0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.154636] env[62923]: DEBUG nova.compute.manager [req-c015fc7a-886c-4d44-9131-0ad0af6c0d0c req-dfb80e8c-7754-47a4-abe6-e9a900aca57b service nova] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Received event network-changed-43314fbb-c56a-40d4-be53-83e2ff602344 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 706.154636] env[62923]: DEBUG nova.compute.manager [req-c015fc7a-886c-4d44-9131-0ad0af6c0d0c req-dfb80e8c-7754-47a4-abe6-e9a900aca57b service nova] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Refreshing instance network info cache due to event network-changed-43314fbb-c56a-40d4-be53-83e2ff602344. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 706.154636] env[62923]: DEBUG oslo_concurrency.lockutils [req-c015fc7a-886c-4d44-9131-0ad0af6c0d0c req-dfb80e8c-7754-47a4-abe6-e9a900aca57b service nova] Acquiring lock "refresh_cache-d45fe9ea-8538-47da-b8dd-c67f8863a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.154636] env[62923]: DEBUG oslo_concurrency.lockutils [req-c015fc7a-886c-4d44-9131-0ad0af6c0d0c req-dfb80e8c-7754-47a4-abe6-e9a900aca57b service nova] Acquired lock "refresh_cache-d45fe9ea-8538-47da-b8dd-c67f8863a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.154822] env[62923]: DEBUG nova.network.neutron [req-c015fc7a-886c-4d44-9131-0ad0af6c0d0c req-dfb80e8c-7754-47a4-abe6-e9a900aca57b service nova] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Refreshing network info cache for port 43314fbb-c56a-40d4-be53-83e2ff602344 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 706.237771] env[62923]: DEBUG nova.network.neutron [-] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.494371] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.495066] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 706.497796] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.032s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.500250] env[62923]: INFO nova.compute.claims [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 706.703275] env[62923]: DEBUG nova.network.neutron [req-c015fc7a-886c-4d44-9131-0ad0af6c0d0c req-dfb80e8c-7754-47a4-abe6-e9a900aca57b service nova] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.742988] env[62923]: INFO nova.compute.manager [-] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Took 1.04 seconds to deallocate network for instance. 
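The lock bookkeeping above ("acquired ... waited 16.032s", "released ... held 2.548s") is emitted by oslo.concurrency's lockutils wrapper (lockutils.py:402/407/421 in these entries): "waited" is time blocked before entering the critical section, "held" is time spent inside it. The 16-second wait on "compute_resources" shows the resource tracker serializing instance_claim against abort_instance_claim. A minimal sketch using the real lockutils API; the lock name matches the log, the bodies are illustrative:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Time spent blocked entering this function is logged as "waited";
        # time spent inside is logged as "held" when the lock is released.
        pass

    instance_claim()

    # Equivalent context-manager form:
    with lockutils.lock('compute_resources'):
        pass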
[ 706.745395] env[62923]: DEBUG nova.compute.claims [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 706.745974] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.907903] env[62923]: DEBUG nova.network.neutron [req-c015fc7a-886c-4d44-9131-0ad0af6c0d0c req-dfb80e8c-7754-47a4-abe6-e9a900aca57b service nova] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.006421] env[62923]: DEBUG nova.compute.utils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 707.011603] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 707.011603] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 707.120433] env[62923]: DEBUG nova.policy [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4b4e513201d4f72bfcf256f1910c52c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '702e4460131b45049ed40bd29339c46b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 707.340588] env[62923]: ERROR nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5596e201-05dc-4c8d-8217-4f72482468cf, please check neutron logs for more information. 
[ 707.340588] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 707.340588] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 707.340588] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 707.340588] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 707.340588] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 707.340588] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 707.340588] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 707.340588] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 707.340588] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 707.340588] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 707.340588] env[62923]: ERROR nova.compute.manager raise self.value [ 707.340588] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 707.340588] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 707.340588] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 707.340588] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 707.341319] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 707.341319] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 707.341319] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5596e201-05dc-4c8d-8217-4f72482468cf, please check neutron logs for more information. 
[ 707.341319] env[62923]: ERROR nova.compute.manager [ 707.341319] env[62923]: Traceback (most recent call last): [ 707.341319] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 707.341319] env[62923]: listener.cb(fileno) [ 707.341319] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 707.341319] env[62923]: result = function(*args, **kwargs) [ 707.341319] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 707.341319] env[62923]: return func(*args, **kwargs) [ 707.341319] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 707.341319] env[62923]: raise e [ 707.341319] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 707.341319] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 707.341319] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 707.341319] env[62923]: created_port_ids = self._update_ports_for_instance( [ 707.341319] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 707.341319] env[62923]: with excutils.save_and_reraise_exception(): [ 707.341319] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 707.341319] env[62923]: self.force_reraise() [ 707.341319] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 707.341319] env[62923]: raise self.value [ 707.341319] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 707.341319] env[62923]: updated_port = self._update_port( [ 707.341319] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 707.341319] env[62923]: _ensure_no_port_binding_failure(port) [ 707.341319] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 707.341319] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 707.342397] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 5596e201-05dc-4c8d-8217-4f72482468cf, please check neutron logs for more information. [ 707.342397] env[62923]: Removing descriptor: 17 [ 707.342397] env[62923]: ERROR nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5596e201-05dc-4c8d-8217-4f72482468cf, please check neutron logs for more information. 
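"Instance failed network setup after 1 attempt(s)" reflects Nova's network_allocate_retries option: it defaults to 0, so allocation gets a single attempt before _allocate_network_async re-raises and the build fails (the per-instance traceback continues below). A hedged sketch of the surrounding retry loop; the structure mirrors nova/compute/manager.py, but the names and the backoff are illustrative, not the verbatim source:

    import time

    NETWORK_ALLOCATE_RETRIES = 0  # stand-in for CONF.network_allocate_retries

    def allocate_network_async(allocate_for_instance):
        attempts = NETWORK_ALLOCATE_RETRIES + 1
        for attempt in range(1, attempts + 1):
            try:
                return allocate_for_instance()
            except Exception:
                if attempt == attempts:
                    # Logged as "Instance failed network setup after
                    # N attempt(s)" and re-raised, failing the build.
                    raise
                time.sleep(attempt)  # simple stand-in backoff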
[ 707.342397] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Traceback (most recent call last): [ 707.342397] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 707.342397] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] yield resources [ 707.342397] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 707.342397] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] self.driver.spawn(context, instance, image_meta, [ 707.342397] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 707.342397] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] self._vmops.spawn(context, instance, image_meta, injected_files, [ 707.342397] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 707.342397] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] vm_ref = self.build_virtual_machine(instance, [ 707.342693] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 707.342693] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] vif_infos = vmwarevif.get_vif_info(self._session, [ 707.342693] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 707.342693] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] for vif in network_info: [ 707.342693] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 707.342693] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] return self._sync_wrapper(fn, *args, **kwargs) [ 707.342693] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 707.342693] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] self.wait() [ 707.342693] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 707.342693] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] self[:] = self._gt.wait() [ 707.342693] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 707.342693] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] return self._exit_event.wait() [ 707.342693] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 707.343025] env[62923]: ERROR 
nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] result = hub.switch() [ 707.343025] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 707.343025] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] return self.greenlet.switch() [ 707.343025] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 707.343025] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] result = function(*args, **kwargs) [ 707.343025] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 707.343025] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] return func(*args, **kwargs) [ 707.343025] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 707.343025] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] raise e [ 707.343025] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 707.343025] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] nwinfo = self.network_api.allocate_for_instance( [ 707.343025] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 707.343025] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] created_port_ids = self._update_ports_for_instance( [ 707.343345] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 707.343345] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] with excutils.save_and_reraise_exception(): [ 707.343345] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 707.343345] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] self.force_reraise() [ 707.343345] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 707.343345] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] raise self.value [ 707.343345] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 707.343345] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] updated_port = self._update_port( [ 707.343345] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 707.343345] 
env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] _ensure_no_port_binding_failure(port) [ 707.343345] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 707.343345] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] raise exception.PortBindingFailed(port_id=port['id']) [ 707.343639] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] nova.exception.PortBindingFailed: Binding failed for port 5596e201-05dc-4c8d-8217-4f72482468cf, please check neutron logs for more information. [ 707.343639] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] [ 707.343639] env[62923]: INFO nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Terminating instance [ 707.346340] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "refresh_cache-325e8102-c129-40f4-b61d-1976d2a1fe42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.346499] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquired lock "refresh_cache-325e8102-c129-40f4-b61d-1976d2a1fe42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.346661] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 707.410550] env[62923]: DEBUG oslo_concurrency.lockutils [req-c015fc7a-886c-4d44-9131-0ad0af6c0d0c req-dfb80e8c-7754-47a4-abe6-e9a900aca57b service nova] Releasing lock "refresh_cache-d45fe9ea-8538-47da-b8dd-c67f8863a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.410818] env[62923]: DEBUG nova.compute.manager [req-c015fc7a-886c-4d44-9131-0ad0af6c0d0c req-dfb80e8c-7754-47a4-abe6-e9a900aca57b service nova] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Received event network-vif-deleted-43314fbb-c56a-40d4-be53-83e2ff602344 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 707.512269] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 707.617346] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Successfully created port: a862aa45-f5eb-449d-817a-ed70359fc14c {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 707.879957] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.898280] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642943f5-5a89-48b5-b4f7-61e36f9e8852 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.907589] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5deff8e4-60d1-4296-90d5-dcb0e58c2fda {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.943114] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ed6696-1e82-4b1e-975b-f299111a6469 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.951255] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5cd0d6-3564-4802-9d2f-e58e653d7c2c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.965367] env[62923]: DEBUG nova.compute.provider_tree [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.042465] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.181011] env[62923]: DEBUG nova.compute.manager [req-f9ee6080-3cd0-4e22-91ff-6e95fa7bce73 req-c795e8dc-0175-4245-853e-baa83706e874 service nova] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Received event network-changed-5596e201-05dc-4c8d-8217-4f72482468cf {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 708.181695] env[62923]: DEBUG nova.compute.manager [req-f9ee6080-3cd0-4e22-91ff-6e95fa7bce73 req-c795e8dc-0175-4245-853e-baa83706e874 service nova] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Refreshing instance network info cache due to event network-changed-5596e201-05dc-4c8d-8217-4f72482468cf. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 708.181695] env[62923]: DEBUG oslo_concurrency.lockutils [req-f9ee6080-3cd0-4e22-91ff-6e95fa7bce73 req-c795e8dc-0175-4245-853e-baa83706e874 service nova] Acquiring lock "refresh_cache-325e8102-c129-40f4-b61d-1976d2a1fe42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.469250] env[62923]: DEBUG nova.scheduler.client.report [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 708.532114] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 708.547566] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Releasing lock "refresh_cache-325e8102-c129-40f4-b61d-1976d2a1fe42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.547566] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 708.547566] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 708.547566] env[62923]: DEBUG oslo_concurrency.lockutils [req-f9ee6080-3cd0-4e22-91ff-6e95fa7bce73 req-c795e8dc-0175-4245-853e-baa83706e874 service nova] Acquired lock "refresh_cache-325e8102-c129-40f4-b61d-1976d2a1fe42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.547566] env[62923]: DEBUG nova.network.neutron [req-f9ee6080-3cd0-4e22-91ff-6e95fa7bce73 req-c795e8dc-0175-4245-853e-baa83706e874 service nova] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Refreshing network info cache for port 5596e201-05dc-4c8d-8217-4f72482468cf {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 708.547918] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0cbe4cb2-82ad-4bbe-9461-4c30ff1b6f57 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.558107] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010de5fb-62ae-4fa8-9074-ca1a16335ec4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.579538] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 708.579754] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 708.579901] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 708.580083] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 
tempest-ListServersNegativeTestJSON-762248896-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 708.580256] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 708.580359] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 708.580551] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 708.580697] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 708.580862] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 708.581030] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 708.581200] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 708.585361] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed17138-fd43-44d1-a097-ba0fcaa51370 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.592749] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf642a0-c000-4c0b-a34b-9070dbafe5a4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.602958] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 
325e8102-c129-40f4-b61d-1976d2a1fe42 could not be found. [ 708.603192] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 708.603370] env[62923]: INFO nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Took 0.06 seconds to destroy the instance on the hypervisor. [ 708.603610] env[62923]: DEBUG oslo.service.loopingcall [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 708.604216] env[62923]: DEBUG nova.compute.manager [-] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 708.604314] env[62923]: DEBUG nova.network.neutron [-] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 708.649068] env[62923]: DEBUG nova.network.neutron [-] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 708.901141] env[62923]: ERROR nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a862aa45-f5eb-449d-817a-ed70359fc14c, please check neutron logs for more information. 
[ 708.901141] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 708.901141] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 708.901141] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 708.901141] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 708.901141] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 708.901141] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 708.901141] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 708.901141] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 708.901141] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 708.901141] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 708.901141] env[62923]: ERROR nova.compute.manager raise self.value [ 708.901141] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 708.901141] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 708.901141] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 708.901141] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 708.901564] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 708.901564] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 708.901564] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a862aa45-f5eb-449d-817a-ed70359fc14c, please check neutron logs for more information. 
[ 708.901564] env[62923]: ERROR nova.compute.manager [ 708.901564] env[62923]: Traceback (most recent call last): [ 708.901564] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 708.901564] env[62923]: listener.cb(fileno) [ 708.901564] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 708.901564] env[62923]: result = function(*args, **kwargs) [ 708.901564] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 708.901564] env[62923]: return func(*args, **kwargs) [ 708.901564] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 708.901564] env[62923]: raise e [ 708.901564] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 708.901564] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 708.901564] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 708.901564] env[62923]: created_port_ids = self._update_ports_for_instance( [ 708.901564] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 708.901564] env[62923]: with excutils.save_and_reraise_exception(): [ 708.901564] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 708.901564] env[62923]: self.force_reraise() [ 708.901564] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 708.901564] env[62923]: raise self.value [ 708.901564] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 708.901564] env[62923]: updated_port = self._update_port( [ 708.901564] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 708.901564] env[62923]: _ensure_no_port_binding_failure(port) [ 708.901564] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 708.901564] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 708.902291] env[62923]: nova.exception.PortBindingFailed: Binding failed for port a862aa45-f5eb-449d-817a-ed70359fc14c, please check neutron logs for more information. [ 708.902291] env[62923]: Removing descriptor: 18 [ 708.902291] env[62923]: ERROR nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a862aa45-f5eb-449d-817a-ed70359fc14c, please check neutron logs for more information. 
[ 708.902291] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Traceback (most recent call last): [ 708.902291] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 708.902291] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] yield resources [ 708.902291] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 708.902291] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] self.driver.spawn(context, instance, image_meta, [ 708.902291] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 708.902291] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] self._vmops.spawn(context, instance, image_meta, injected_files, [ 708.902291] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 708.902291] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] vm_ref = self.build_virtual_machine(instance, [ 708.902613] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 708.902613] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] vif_infos = vmwarevif.get_vif_info(self._session, [ 708.902613] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 708.902613] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] for vif in network_info: [ 708.902613] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 708.902613] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] return self._sync_wrapper(fn, *args, **kwargs) [ 708.902613] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 708.902613] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] self.wait() [ 708.902613] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 708.902613] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] self[:] = self._gt.wait() [ 708.902613] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 708.902613] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] return self._exit_event.wait() [ 708.902613] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 708.902952] env[62923]: ERROR 
nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] result = hub.switch() [ 708.902952] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 708.902952] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] return self.greenlet.switch() [ 708.902952] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 708.902952] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] result = function(*args, **kwargs) [ 708.902952] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 708.902952] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] return func(*args, **kwargs) [ 708.902952] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 708.902952] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] raise e [ 708.902952] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 708.902952] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] nwinfo = self.network_api.allocate_for_instance( [ 708.902952] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 708.902952] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] created_port_ids = self._update_ports_for_instance( [ 708.903321] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 708.903321] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] with excutils.save_and_reraise_exception(): [ 708.903321] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 708.903321] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] self.force_reraise() [ 708.903321] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 708.903321] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] raise self.value [ 708.903321] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 708.903321] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] updated_port = self._update_port( [ 708.903321] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 708.903321] 
env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] _ensure_no_port_binding_failure(port) [ 708.903321] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 708.903321] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] raise exception.PortBindingFailed(port_id=port['id']) [ 708.903844] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] nova.exception.PortBindingFailed: Binding failed for port a862aa45-f5eb-449d-817a-ed70359fc14c, please check neutron logs for more information. [ 708.903844] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] [ 708.903844] env[62923]: INFO nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Terminating instance [ 708.904799] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "refresh_cache-fa7295fe-b893-455b-9d4b-4013c187c288" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.905019] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquired lock "refresh_cache-fa7295fe-b893-455b-9d4b-4013c187c288" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.905232] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 708.976107] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.478s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.976814] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 708.979796] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.876s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.076701] env[62923]: DEBUG nova.network.neutron [req-f9ee6080-3cd0-4e22-91ff-6e95fa7bce73 req-c795e8dc-0175-4245-853e-baa83706e874 service nova] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.151953] env[62923]: DEBUG nova.network.neutron [-] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.159680] env[62923]: DEBUG nova.network.neutron [req-f9ee6080-3cd0-4e22-91ff-6e95fa7bce73 req-c795e8dc-0175-4245-853e-baa83706e874 service nova] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.485778] env[62923]: DEBUG nova.compute.utils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 709.491025] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 709.491025] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 709.524629] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.603467] env[62923]: DEBUG nova.policy [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4b4e513201d4f72bfcf256f1910c52c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '702e4460131b45049ed40bd29339c46b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 709.655127] env[62923]: INFO nova.compute.manager [-] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Took 1.05 seconds to deallocate network for instance. [ 709.656568] env[62923]: DEBUG nova.compute.claims [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 709.656741] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.667192] env[62923]: DEBUG oslo_concurrency.lockutils [req-f9ee6080-3cd0-4e22-91ff-6e95fa7bce73 req-c795e8dc-0175-4245-853e-baa83706e874 service nova] Releasing lock "refresh_cache-325e8102-c129-40f4-b61d-1976d2a1fe42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.667437] env[62923]: DEBUG nova.compute.manager [req-f9ee6080-3cd0-4e22-91ff-6e95fa7bce73 req-c795e8dc-0175-4245-853e-baa83706e874 service nova] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Received event network-vif-deleted-5596e201-05dc-4c8d-8217-4f72482468cf {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 709.712069] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.918256] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fbbeb7-8f97-4b76-a4a5-44af318024af {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.928888] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103e63d4-056c-4cf6-99b3-a8be3ef5b172 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.961375] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5229073d-1ea7-4a72-8dbd-8f47cdb08118 
{{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.970256] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df2131b-90e8-470e-82ea-183a99e86445 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.989934] env[62923]: DEBUG nova.compute.provider_tree [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.992997] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 710.043510] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Successfully created port: 591b4e58-ab5d-4aac-8e11-df8af19f43eb {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.213534] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Releasing lock "refresh_cache-fa7295fe-b893-455b-9d4b-4013c187c288" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.217209] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 710.217209] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 710.217209] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b7ff7fc6-4c2a-4c05-aeb6-656677827889 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.227486] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e727e8-0e8c-4c07-894a-3fbc84947efe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.246804] env[62923]: DEBUG nova.compute.manager [req-cdb31afe-d0e7-494d-895d-48ab5b5bb443 req-f1c709d0-1165-4e94-9c00-c8dbe405164c service nova] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Received event network-changed-a862aa45-f5eb-449d-817a-ed70359fc14c {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 710.247011] env[62923]: DEBUG nova.compute.manager [req-cdb31afe-d0e7-494d-895d-48ab5b5bb443 req-f1c709d0-1165-4e94-9c00-c8dbe405164c service nova] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Refreshing instance network info cache due to event network-changed-a862aa45-f5eb-449d-817a-ed70359fc14c. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 710.247233] env[62923]: DEBUG oslo_concurrency.lockutils [req-cdb31afe-d0e7-494d-895d-48ab5b5bb443 req-f1c709d0-1165-4e94-9c00-c8dbe405164c service nova] Acquiring lock "refresh_cache-fa7295fe-b893-455b-9d4b-4013c187c288" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.247374] env[62923]: DEBUG oslo_concurrency.lockutils [req-cdb31afe-d0e7-494d-895d-48ab5b5bb443 req-f1c709d0-1165-4e94-9c00-c8dbe405164c service nova] Acquired lock "refresh_cache-fa7295fe-b893-455b-9d4b-4013c187c288" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.247671] env[62923]: DEBUG nova.network.neutron [req-cdb31afe-d0e7-494d-895d-48ab5b5bb443 req-f1c709d0-1165-4e94-9c00-c8dbe405164c service nova] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Refreshing network info cache for port a862aa45-f5eb-449d-817a-ed70359fc14c {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 710.255555] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fa7295fe-b893-455b-9d4b-4013c187c288 could not be found. 
[ 710.255862] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 710.255977] env[62923]: INFO nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Took 0.04 seconds to destroy the instance on the hypervisor. [ 710.256233] env[62923]: DEBUG oslo.service.loopingcall [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 710.256446] env[62923]: DEBUG nova.compute.manager [-] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 710.256537] env[62923]: DEBUG nova.network.neutron [-] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 710.276403] env[62923]: DEBUG nova.network.neutron [-] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.493535] env[62923]: DEBUG nova.scheduler.client.report [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 710.771308] env[62923]: DEBUG nova.network.neutron [req-cdb31afe-d0e7-494d-895d-48ab5b5bb443 req-f1c709d0-1165-4e94-9c00-c8dbe405164c service nova] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.781094] env[62923]: DEBUG nova.network.neutron [-] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.949582] env[62923]: DEBUG nova.network.neutron [req-cdb31afe-d0e7-494d-895d-48ab5b5bb443 req-f1c709d0-1165-4e94-9c00-c8dbe405164c service nova] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.007493] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.027s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.007493] env[62923]: ERROR nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fd41eba7-2ea3-4c7e-9541-07f6f6144c1d, please check neutron logs for more information. [ 711.007493] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Traceback (most recent call last): [ 711.007493] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 711.007493] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] self.driver.spawn(context, instance, image_meta, [ 711.007493] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 711.007493] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] self._vmops.spawn(context, instance, image_meta, injected_files, [ 711.007493] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 711.007493] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] vm_ref = self.build_virtual_machine(instance, [ 711.007836] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 711.007836] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] vif_infos = vmwarevif.get_vif_info(self._session, [ 711.007836] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 711.007836] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] for vif in network_info: [ 711.007836] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 711.007836] env[62923]: ERROR nova.compute.manager 
[instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] return self._sync_wrapper(fn, *args, **kwargs) [ 711.007836] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 711.007836] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] self.wait() [ 711.007836] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 711.007836] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] self[:] = self._gt.wait() [ 711.007836] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 711.007836] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] return self._exit_event.wait() [ 711.007836] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 711.008249] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] result = hub.switch() [ 711.008249] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 711.008249] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] return self.greenlet.switch() [ 711.008249] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 711.008249] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] result = function(*args, **kwargs) [ 711.008249] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 711.008249] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] return func(*args, **kwargs) [ 711.008249] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 711.008249] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] raise e [ 711.008249] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.008249] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] nwinfo = self.network_api.allocate_for_instance( [ 711.008249] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 711.008249] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] created_port_ids = self._update_ports_for_instance( [ 711.008710] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 711.008710] env[62923]: ERROR nova.compute.manager [instance: 
13e71116-cb20-4fc5-8ceb-3a6098bae438] with excutils.save_and_reraise_exception(): [ 711.008710] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.008710] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] self.force_reraise() [ 711.008710] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.008710] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] raise self.value [ 711.008710] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 711.008710] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] updated_port = self._update_port( [ 711.008710] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.008710] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] _ensure_no_port_binding_failure(port) [ 711.008710] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.008710] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] raise exception.PortBindingFailed(port_id=port['id']) [ 711.009116] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] nova.exception.PortBindingFailed: Binding failed for port fd41eba7-2ea3-4c7e-9541-07f6f6144c1d, please check neutron logs for more information. [ 711.009116] env[62923]: ERROR nova.compute.manager [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] [ 711.009116] env[62923]: DEBUG nova.compute.utils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Binding failed for port fd41eba7-2ea3-4c7e-9541-07f6f6144c1d, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 711.011829] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 711.014599] env[62923]: DEBUG nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Build of instance 13e71116-cb20-4fc5-8ceb-3a6098bae438 was re-scheduled: Binding failed for port fd41eba7-2ea3-4c7e-9541-07f6f6144c1d, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 711.014599] env[62923]: DEBUG nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 711.014599] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Acquiring lock "refresh_cache-13e71116-cb20-4fc5-8ceb-3a6098bae438" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.014810] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Acquired lock "refresh_cache-13e71116-cb20-4fc5-8ceb-3a6098bae438" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.014887] env[62923]: DEBUG nova.network.neutron [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.016017] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.807s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.058301] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 711.058958] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 711.059051] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 
tempest-ListServersNegativeTestJSON-762248896-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 711.059581] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 711.059581] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 711.059581] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 711.059918] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 711.060701] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 711.060701] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 711.060701] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 711.061265] env[62923]: DEBUG nova.virt.hardware [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 711.062892] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc76247-6511-47cc-92a8-bcdf6e7d1639 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.077555] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc9b787-1c16-4ca7-82f8-3da5e9e31c52 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.245250] env[62923]: ERROR 
nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 591b4e58-ab5d-4aac-8e11-df8af19f43eb, please check neutron logs for more information. [ 711.245250] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 711.245250] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.245250] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 711.245250] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 711.245250] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 711.245250] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 711.245250] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 711.245250] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.245250] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 711.245250] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.245250] env[62923]: ERROR nova.compute.manager raise self.value [ 711.245250] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 711.245250] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 711.245250] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.245250] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 711.246102] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.246102] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 711.246102] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 591b4e58-ab5d-4aac-8e11-df8af19f43eb, please check neutron logs for more information. 
[ 711.246102] env[62923]: ERROR nova.compute.manager [ 711.246102] env[62923]: Traceback (most recent call last): [ 711.246102] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 711.246102] env[62923]: listener.cb(fileno) [ 711.246102] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 711.246102] env[62923]: result = function(*args, **kwargs) [ 711.246102] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 711.246102] env[62923]: return func(*args, **kwargs) [ 711.246102] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 711.246102] env[62923]: raise e [ 711.246102] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.246102] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 711.246102] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 711.246102] env[62923]: created_port_ids = self._update_ports_for_instance( [ 711.246102] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 711.246102] env[62923]: with excutils.save_and_reraise_exception(): [ 711.246102] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.246102] env[62923]: self.force_reraise() [ 711.246102] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.246102] env[62923]: raise self.value [ 711.246102] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 711.246102] env[62923]: updated_port = self._update_port( [ 711.246102] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.246102] env[62923]: _ensure_no_port_binding_failure(port) [ 711.246102] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.246102] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 711.247270] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 591b4e58-ab5d-4aac-8e11-df8af19f43eb, please check neutron logs for more information. [ 711.247270] env[62923]: Removing descriptor: 17 [ 711.247270] env[62923]: ERROR nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 591b4e58-ab5d-4aac-8e11-df8af19f43eb, please check neutron logs for more information. 
[ 711.247270] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Traceback (most recent call last):
[ 711.247270] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 711.247270] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] yield resources
[ 711.247270] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 711.247270] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] self.driver.spawn(context, instance, image_meta,
[ 711.247270] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 711.247270] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 711.247270] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 711.247270] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] vm_ref = self.build_virtual_machine(instance,
[ 711.247751] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 711.247751] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] vif_infos = vmwarevif.get_vif_info(self._session,
[ 711.247751] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 711.247751] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] for vif in network_info:
[ 711.247751] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 711.247751] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] return self._sync_wrapper(fn, *args, **kwargs)
[ 711.247751] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 711.247751] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] self.wait()
[ 711.247751] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 711.247751] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] self[:] = self._gt.wait()
[ 711.247751] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 711.247751] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] return self._exit_event.wait()
[ 711.247751] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 711.248267] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] result = hub.switch()
[ 711.248267] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 711.248267] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] return self.greenlet.switch()
[ 711.248267] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 711.248267] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] result = function(*args, **kwargs)
[ 711.248267] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 711.248267] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] return func(*args, **kwargs)
[ 711.248267] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 711.248267] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] raise e
[ 711.248267] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 711.248267] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] nwinfo = self.network_api.allocate_for_instance(
[ 711.248267] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 711.248267] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] created_port_ids = self._update_ports_for_instance(
[ 711.248795] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 711.248795] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] with excutils.save_and_reraise_exception():
[ 711.248795] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 711.248795] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] self.force_reraise()
[ 711.248795] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 711.248795] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] raise self.value
[ 711.248795] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 711.248795] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] updated_port = self._update_port(
[ 711.248795] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 711.248795] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] _ensure_no_port_binding_failure(port)
[ 711.248795] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 711.248795] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] raise exception.PortBindingFailed(port_id=port['id'])
[ 711.249257] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] nova.exception.PortBindingFailed: Binding failed for port 591b4e58-ab5d-4aac-8e11-df8af19f43eb, please check neutron logs for more information.
[ 711.249257] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6]
[ 711.249257] env[62923]: INFO nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Terminating instance
[ 711.249257] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "refresh_cache-627ebcab-90f9-4ebe-baf9-52fe808ec8c6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 711.249257] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquired lock "refresh_cache-627ebcab-90f9-4ebe-baf9-52fe808ec8c6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 711.249257] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 711.284746] env[62923]: INFO nova.compute.manager [-] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Took 1.03 seconds to deallocate network for instance.
[ 711.287900] env[62923]: DEBUG nova.compute.claims [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 711.288096] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 711.455863] env[62923]: DEBUG oslo_concurrency.lockutils [req-cdb31afe-d0e7-494d-895d-48ab5b5bb443 req-f1c709d0-1165-4e94-9c00-c8dbe405164c service nova] Releasing lock "refresh_cache-fa7295fe-b893-455b-9d4b-4013c187c288" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 711.456238] env[62923]: DEBUG nova.compute.manager [req-cdb31afe-d0e7-494d-895d-48ab5b5bb443 req-f1c709d0-1165-4e94-9c00-c8dbe405164c service nova] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Received event network-vif-deleted-a862aa45-f5eb-449d-817a-ed70359fc14c {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 711.537310] env[62923]: DEBUG nova.network.neutron [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 711.658622] env[62923]: DEBUG nova.network.neutron [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 711.771904] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 711.872639] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 711.879034] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fabb079-57d5-4037-b57d-83993af1f66b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 711.888097] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566587c3-6b5f-4774-9023-9722b5a22850 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 711.923603] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a070297-dac4-489d-9a94-c8d1b41534f6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 711.931438] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13e3b8e-a090-47e9-833e-709988a619dc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 711.949149] env[62923]: DEBUG nova.compute.provider_tree [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 712.161574] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Releasing lock "refresh_cache-13e71116-cb20-4fc5-8ceb-3a6098bae438" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 712.161574] env[62923]: DEBUG nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 712.161794] env[62923]: DEBUG nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 712.161935] env[62923]: DEBUG nova.network.neutron [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 712.182327] env[62923]: DEBUG nova.network.neutron [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 712.273718] env[62923]: DEBUG nova.compute.manager [req-86e9157b-d050-4124-9b74-b76d2fdcf25d req-0b87d0b5-eaf6-4f81-a6f3-cde96d403959 service nova] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Received event network-changed-591b4e58-ab5d-4aac-8e11-df8af19f43eb {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 712.274104] env[62923]: DEBUG nova.compute.manager [req-86e9157b-d050-4124-9b74-b76d2fdcf25d req-0b87d0b5-eaf6-4f81-a6f3-cde96d403959 service nova] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Refreshing instance network info cache due to event network-changed-591b4e58-ab5d-4aac-8e11-df8af19f43eb. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 712.274104] env[62923]: DEBUG oslo_concurrency.lockutils [req-86e9157b-d050-4124-9b74-b76d2fdcf25d req-0b87d0b5-eaf6-4f81-a6f3-cde96d403959 service nova] Acquiring lock "refresh_cache-627ebcab-90f9-4ebe-baf9-52fe808ec8c6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 712.376891] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Releasing lock "refresh_cache-627ebcab-90f9-4ebe-baf9-52fe808ec8c6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 712.376891] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 712.376891] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 712.376891] env[62923]: DEBUG oslo_concurrency.lockutils [req-86e9157b-d050-4124-9b74-b76d2fdcf25d req-0b87d0b5-eaf6-4f81-a6f3-cde96d403959 service nova] Acquired lock "refresh_cache-627ebcab-90f9-4ebe-baf9-52fe808ec8c6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 712.376891] env[62923]: DEBUG nova.network.neutron [req-86e9157b-d050-4124-9b74-b76d2fdcf25d req-0b87d0b5-eaf6-4f81-a6f3-cde96d403959 service nova] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Refreshing network info cache for port 591b4e58-ab5d-4aac-8e11-df8af19f43eb {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 712.377261] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-89bf42db-bf29-493e-816e-415f2ae7a893 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 712.386035] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88ac7d6-9aed-4ab1-a760-11f5c120eda8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 712.409411] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 627ebcab-90f9-4ebe-baf9-52fe808ec8c6 could not be found.
[ 712.409631] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 712.409802] env[62923]: INFO nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Took 0.03 seconds to destroy the instance on the hypervisor.
[ 712.410081] env[62923]: DEBUG oslo.service.loopingcall [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 712.410313] env[62923]: DEBUG nova.compute.manager [-] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 712.410405] env[62923]: DEBUG nova.network.neutron [-] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 712.423916] env[62923]: DEBUG nova.network.neutron [-] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 712.447196] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Acquiring lock "08d39755-f94c-45aa-bfb5-f179e8a370db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 712.447196] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Lock "08d39755-f94c-45aa-bfb5-f179e8a370db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 712.452634] env[62923]: DEBUG nova.scheduler.client.report [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 712.688206] env[62923]: DEBUG nova.network.neutron [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 712.921216] env[62923]: DEBUG nova.network.neutron [req-86e9157b-d050-4124-9b74-b76d2fdcf25d req-0b87d0b5-eaf6-4f81-a6f3-cde96d403959 service nova] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 712.928137] env[62923]: DEBUG nova.network.neutron [-] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 712.958857] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.943s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 712.959528] env[62923]: ERROR nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 30f00255-95f5-4dea-9292-b132c6e7cc70, please check neutron logs for more information.
[ 712.959528] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Traceback (most recent call last):
[ 712.959528] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 712.959528] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] self.driver.spawn(context, instance, image_meta,
[ 712.959528] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 712.959528] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 712.959528] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 712.959528] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] vm_ref = self.build_virtual_machine(instance,
[ 712.959528] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 712.959528] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] vif_infos = vmwarevif.get_vif_info(self._session,
[ 712.959528] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 712.959873] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] for vif in network_info:
[ 712.959873] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 712.959873] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] return self._sync_wrapper(fn, *args, **kwargs)
[ 712.959873] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 712.959873] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] self.wait()
[ 712.959873] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 712.959873] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] self[:] = self._gt.wait()
[ 712.959873] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 712.959873] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] return self._exit_event.wait()
[ 712.959873] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 712.959873] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] result = hub.switch()
[ 712.959873] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 712.959873] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] return self.greenlet.switch()
[ 712.960257] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 712.960257] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] result = function(*args, **kwargs)
[ 712.960257] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 712.960257] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] return func(*args, **kwargs)
[ 712.960257] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 712.960257] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] raise e
[ 712.960257] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 712.960257] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] nwinfo = self.network_api.allocate_for_instance(
[ 712.960257] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 712.960257] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] created_port_ids = self._update_ports_for_instance(
[ 712.960257] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 712.960257] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] with excutils.save_and_reraise_exception():
[ 712.960257] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 712.960582] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] self.force_reraise()
[ 712.960582] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 712.960582] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] raise self.value
[ 712.960582] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 712.960582] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] updated_port = self._update_port(
[ 712.960582] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 712.960582] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] _ensure_no_port_binding_failure(port)
[ 712.960582] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 712.960582] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] raise exception.PortBindingFailed(port_id=port['id'])
[ 712.960582] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] nova.exception.PortBindingFailed: Binding failed for port 30f00255-95f5-4dea-9292-b132c6e7cc70, please check neutron logs for more information.
[ 712.960582] env[62923]: ERROR nova.compute.manager [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44]
[ 712.960848] env[62923]: DEBUG nova.compute.utils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Binding failed for port 30f00255-95f5-4dea-9292-b132c6e7cc70, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 712.961317] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.531s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 712.963172] env[62923]: INFO nova.compute.claims [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 712.965764] env[62923]: DEBUG nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Build of instance c11d0dcc-e5aa-4d7c-bba5-2853622dde44 was re-scheduled: Binding failed for port 30f00255-95f5-4dea-9292-b132c6e7cc70, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
[ 712.966200] env[62923]: DEBUG nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 712.966419] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquiring lock "refresh_cache-c11d0dcc-e5aa-4d7c-bba5-2853622dde44" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 712.966559] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquired lock "refresh_cache-c11d0dcc-e5aa-4d7c-bba5-2853622dde44" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 712.966712] env[62923]: DEBUG nova.network.neutron [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 713.055181] env[62923]: DEBUG nova.network.neutron [req-86e9157b-d050-4124-9b74-b76d2fdcf25d req-0b87d0b5-eaf6-4f81-a6f3-cde96d403959 service nova] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 713.190815] env[62923]: INFO nova.compute.manager [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] [instance: 13e71116-cb20-4fc5-8ceb-3a6098bae438] Took 1.03 seconds to deallocate network for instance.
[ 713.430809] env[62923]: INFO nova.compute.manager [-] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Took 1.02 seconds to deallocate network for instance.
[ 713.433231] env[62923]: DEBUG nova.compute.claims [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 713.433404] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 713.487420] env[62923]: DEBUG nova.network.neutron [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 713.558731] env[62923]: DEBUG oslo_concurrency.lockutils [req-86e9157b-d050-4124-9b74-b76d2fdcf25d req-0b87d0b5-eaf6-4f81-a6f3-cde96d403959 service nova] Releasing lock "refresh_cache-627ebcab-90f9-4ebe-baf9-52fe808ec8c6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 713.558854] env[62923]: DEBUG nova.compute.manager [req-86e9157b-d050-4124-9b74-b76d2fdcf25d req-0b87d0b5-eaf6-4f81-a6f3-cde96d403959 service nova] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Received event network-vif-deleted-591b4e58-ab5d-4aac-8e11-df8af19f43eb {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 713.590935] env[62923]: DEBUG nova.network.neutron [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 714.096634] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Releasing lock "refresh_cache-c11d0dcc-e5aa-4d7c-bba5-2853622dde44" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 714.100025] env[62923]: DEBUG nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 714.100025] env[62923]: DEBUG nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 714.100025] env[62923]: DEBUG nova.network.neutron [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 714.115348] env[62923]: DEBUG nova.network.neutron [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 714.220114] env[62923]: INFO nova.scheduler.client.report [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Deleted allocations for instance 13e71116-cb20-4fc5-8ceb-3a6098bae438
[ 714.320034] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c05219-e690-42a9-81a1-e8a98caf4fbc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 714.327844] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ed843f-b6f0-4984-95a3-a6187c86a742 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 714.357561] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27ef57e-d097-4d07-9df0-f334f2f8e229 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 714.364620] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ee3310-eb8b-407e-a7a4-42e319ab2933 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 714.378564] env[62923]: DEBUG nova.compute.provider_tree [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 714.619627] env[62923]: DEBUG nova.network.neutron [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 714.728371] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d86f9797-196a-41f8-90e6-7740d8651f07 tempest-ServersTestManualDisk-1306562133 tempest-ServersTestManualDisk-1306562133-project-member] Lock "13e71116-cb20-4fc5-8ceb-3a6098bae438" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.422s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 714.881441] env[62923]: DEBUG nova.scheduler.client.report [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 715.122535] env[62923]: INFO nova.compute.manager [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: c11d0dcc-e5aa-4d7c-bba5-2853622dde44] Took 1.03 seconds to deallocate network for instance.
[ 715.232570] env[62923]: DEBUG nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 715.388325] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 715.388851] env[62923]: DEBUG nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 715.391470] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.878s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 715.392801] env[62923]: INFO nova.compute.claims [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 715.750216] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 715.899067] env[62923]: DEBUG nova.compute.utils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 715.899067] env[62923]: DEBUG nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 715.899067] env[62923]: DEBUG nova.network.neutron [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 715.993590] env[62923]: DEBUG nova.policy [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e46f3881aec41d8be37a2f53265b333', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e421d924fa054e0d9aadefdda118b102', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}}
[ 716.156214] env[62923]: INFO nova.scheduler.client.report [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Deleted allocations for instance c11d0dcc-e5aa-4d7c-bba5-2853622dde44
[ 716.402759] env[62923]: DEBUG nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 716.669926] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d4a8a44-1657-449d-a670-89eaee2a0a3b tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "c11d0dcc-e5aa-4d7c-bba5-2853622dde44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.633s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 716.766645] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5988ae-6d85-41ce-8328-1ecc3795b543 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 716.775431] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1c2de5-39ce-4a6c-952b-0f7d65bd0755 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 716.806565] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af72b0d7-2621-4f97-ab9f-d00d75f560cb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 716.814455] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b27f25-c9dd-4b87-9ee5-5954bc06e1ba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 716.831443] env[62923]: DEBUG nova.compute.provider_tree [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 716.995204] env[62923]: DEBUG nova.network.neutron [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Successfully created port: 0b11ddb2-dda0-428f-abd5-5dfdf1041aa6 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 717.174937] env[62923]: DEBUG nova.compute.manager [None req-19d3d9cd-0b44-409e-a4e4-0e4c73a02172 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: a004026e-ab4e-45b8-b4ab-d517496c9c7a] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 717.335059] env[62923]: DEBUG nova.scheduler.client.report [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 717.417732] env[62923]: DEBUG nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 717.446086] env[62923]: DEBUG nova.virt.hardware [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 717.446086] env[62923]: DEBUG nova.virt.hardware [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 717.446086] env[62923]: DEBUG nova.virt.hardware [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 717.446382] env[62923]: DEBUG nova.virt.hardware [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 717.446382] env[62923]: DEBUG nova.virt.hardware [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 717.446382] env[62923]: DEBUG nova.virt.hardware [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 717.446382] env[62923]: DEBUG nova.virt.hardware [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 717.446382] env[62923]: DEBUG nova.virt.hardware [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 717.446547] env[62923]: DEBUG nova.virt.hardware [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 717.446547] env[62923]: DEBUG nova.virt.hardware [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 717.446547] env[62923]: DEBUG nova.virt.hardware [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 717.447293] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad191781-43ef-493d-ac01-147470ac4aae {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 717.456853] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0072783d-cbbb-431b-97e8-e065e89c894f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 717.680317] env[62923]: DEBUG nova.compute.manager [None req-19d3d9cd-0b44-409e-a4e4-0e4c73a02172 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: a004026e-ab4e-45b8-b4ab-d517496c9c7a] Instance disappeared before build. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}}
[ 717.843441] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 717.843975] env[62923]: DEBUG nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 717.848046] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.564s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 717.849635] env[62923]: INFO nova.compute.claims [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 718.205188] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19d3d9cd-0b44-409e-a4e4-0e4c73a02172 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "a004026e-ab4e-45b8-b4ab-d517496c9c7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.466s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 718.283619] env[62923]: DEBUG nova.compute.manager [req-f3838570-4378-4036-a25e-0ca4964ec91f req-afd4785e-ccb8-4e13-b542-27a657721cef service nova] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Received event network-changed-0b11ddb2-dda0-428f-abd5-5dfdf1041aa6 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 718.283819] env[62923]: DEBUG nova.compute.manager [req-f3838570-4378-4036-a25e-0ca4964ec91f req-afd4785e-ccb8-4e13-b542-27a657721cef service nova] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Refreshing instance network info cache due to event network-changed-0b11ddb2-dda0-428f-abd5-5dfdf1041aa6. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 718.284076] env[62923]: DEBUG oslo_concurrency.lockutils [req-f3838570-4378-4036-a25e-0ca4964ec91f req-afd4785e-ccb8-4e13-b542-27a657721cef service nova] Acquiring lock "refresh_cache-c370e9a9-3c09-418c-b2fc-e75323298518" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 718.284223] env[62923]: DEBUG oslo_concurrency.lockutils [req-f3838570-4378-4036-a25e-0ca4964ec91f req-afd4785e-ccb8-4e13-b542-27a657721cef service nova] Acquired lock "refresh_cache-c370e9a9-3c09-418c-b2fc-e75323298518" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 718.284379] env[62923]: DEBUG nova.network.neutron [req-f3838570-4378-4036-a25e-0ca4964ec91f req-afd4785e-ccb8-4e13-b542-27a657721cef service nova] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Refreshing network info cache for port 0b11ddb2-dda0-428f-abd5-5dfdf1041aa6 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 718.355470] env[62923]: DEBUG nova.compute.utils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 718.356797] env[62923]: DEBUG nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 718.356968] env[62923]: DEBUG nova.network.neutron [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 718.530423] env[62923]: DEBUG nova.policy [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e46f3881aec41d8be37a2f53265b333', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e421d924fa054e0d9aadefdda118b102', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}}
[ 718.708132] env[62923]: DEBUG nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 718.863295] env[62923]: DEBUG nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Start building block device mappings for instance.
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 718.918606] env[62923]: DEBUG nova.network.neutron [req-f3838570-4378-4036-a25e-0ca4964ec91f req-afd4785e-ccb8-4e13-b542-27a657721cef service nova] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 719.024719] env[62923]: ERROR nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0b11ddb2-dda0-428f-abd5-5dfdf1041aa6, please check neutron logs for more information. [ 719.024719] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 719.024719] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 719.024719] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 719.024719] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.024719] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 719.024719] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.024719] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 719.024719] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.024719] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 719.024719] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.024719] env[62923]: ERROR nova.compute.manager raise self.value [ 719.024719] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.024719] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 719.024719] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.024719] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 719.026257] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 719.026257] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 719.026257] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 0b11ddb2-dda0-428f-abd5-5dfdf1041aa6, please check neutron logs for more information. 
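The traceback above centers on oslo.utils' save_and_reraise_exception context manager: _update_ports_for_instance catches the failure from _update_port, performs its cleanup, and the context manager re-raises the original PortBindingFailed on exit. A minimal standalone sketch of that pattern follows; the PortBindingFailed class and the port dict shape are stand-ins for illustration, not Nova's actual code.

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

    def _update_port(port):
        # Hypothetical check mirroring _ensure_no_port_binding_failure().
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed("Binding failed for port %s" % port["id"])

    def update_ports_for_instance(ports):
        for port in ports:
            try:
                _update_port(port)
            except Exception:
                # Must run inside the except block: the context manager
                # captures the in-flight exception, lets cleanup execute,
                # then re-raises the captured PortBindingFailed on exit.
                with excutils.save_and_reraise_exception():
                    print("rolling back port %s" % port["id"])

The point of the pattern, visible in the traceback, is that cleanup can run without swallowing or replacing the original exception.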
[ 719.026257] env[62923]: ERROR nova.compute.manager [ 719.026257] env[62923]: Traceback (most recent call last): [ 719.026257] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 719.026257] env[62923]: listener.cb(fileno) [ 719.026257] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 719.026257] env[62923]: result = function(*args, **kwargs) [ 719.026257] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 719.026257] env[62923]: return func(*args, **kwargs) [ 719.026257] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 719.026257] env[62923]: raise e [ 719.026257] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 719.026257] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 719.026257] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.026257] env[62923]: created_port_ids = self._update_ports_for_instance( [ 719.026257] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.026257] env[62923]: with excutils.save_and_reraise_exception(): [ 719.026257] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.026257] env[62923]: self.force_reraise() [ 719.026257] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.026257] env[62923]: raise self.value [ 719.026257] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.026257] env[62923]: updated_port = self._update_port( [ 719.026257] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.026257] env[62923]: _ensure_no_port_binding_failure(port) [ 719.026257] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 719.026257] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 719.027017] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 0b11ddb2-dda0-428f-abd5-5dfdf1041aa6, please check neutron logs for more information. [ 719.027017] env[62923]: Removing descriptor: 17 [ 719.027017] env[62923]: ERROR nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0b11ddb2-dda0-428f-abd5-5dfdf1041aa6, please check neutron logs for more information. 
[ 719.027017] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Traceback (most recent call last): [ 719.027017] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 719.027017] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] yield resources [ 719.027017] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 719.027017] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] self.driver.spawn(context, instance, image_meta, [ 719.027017] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 719.027017] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] self._vmops.spawn(context, instance, image_meta, injected_files, [ 719.027017] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 719.027017] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] vm_ref = self.build_virtual_machine(instance, [ 719.031484] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 719.031484] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] vif_infos = vmwarevif.get_vif_info(self._session, [ 719.031484] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 719.031484] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] for vif in network_info: [ 719.031484] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 719.031484] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] return self._sync_wrapper(fn, *args, **kwargs) [ 719.031484] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 719.031484] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] self.wait() [ 719.031484] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 719.031484] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] self[:] = self._gt.wait() [ 719.031484] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 719.031484] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] return self._exit_event.wait() [ 719.031484] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 719.032421] env[62923]: ERROR 
nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] result = hub.switch() [ 719.032421] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 719.032421] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] return self.greenlet.switch() [ 719.032421] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 719.032421] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] result = function(*args, **kwargs) [ 719.032421] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 719.032421] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] return func(*args, **kwargs) [ 719.032421] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 719.032421] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] raise e [ 719.032421] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 719.032421] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] nwinfo = self.network_api.allocate_for_instance( [ 719.032421] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 719.032421] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] created_port_ids = self._update_ports_for_instance( [ 719.032829] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 719.032829] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] with excutils.save_and_reraise_exception(): [ 719.032829] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 719.032829] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] self.force_reraise() [ 719.032829] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 719.032829] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] raise self.value [ 719.032829] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 719.032829] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] updated_port = self._update_port( [ 719.032829] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 719.032829] 
env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] _ensure_no_port_binding_failure(port) [ 719.032829] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 719.032829] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] raise exception.PortBindingFailed(port_id=port['id']) [ 719.033246] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] nova.exception.PortBindingFailed: Binding failed for port 0b11ddb2-dda0-428f-abd5-5dfdf1041aa6, please check neutron logs for more information. [ 719.033246] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] [ 719.033246] env[62923]: INFO nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Terminating instance [ 719.033246] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquiring lock "refresh_cache-c370e9a9-3c09-418c-b2fc-e75323298518" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.086262] env[62923]: DEBUG nova.network.neutron [req-f3838570-4378-4036-a25e-0ca4964ec91f req-afd4785e-ccb8-4e13-b542-27a657721cef service nova] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.229872] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.272508] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b8cd05-9d8b-4c67-a431-6aa077bb318b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.280178] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c63b4b5-e5cb-46a4-bf1b-654f78b204ef {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.312846] env[62923]: DEBUG nova.network.neutron [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Successfully created port: 1fac04a8-79b0-4aed-ac19-3a7c50287f9d {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.315444] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c7c7a1-03b2-4be0-b003-155ea96f46fe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.323828] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-48f2e62e-6aca-4957-aa1c-58da3a548800 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.337436] env[62923]: DEBUG nova.compute.provider_tree [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.592235] env[62923]: DEBUG oslo_concurrency.lockutils [req-f3838570-4378-4036-a25e-0ca4964ec91f req-afd4785e-ccb8-4e13-b542-27a657721cef service nova] Releasing lock "refresh_cache-c370e9a9-3c09-418c-b2fc-e75323298518" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.592235] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquired lock "refresh_cache-c370e9a9-3c09-418c-b2fc-e75323298518" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.592235] env[62923]: DEBUG nova.network.neutron [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 719.636794] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.636794] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.840986] env[62923]: DEBUG nova.scheduler.client.report [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 719.875589] env[62923]: DEBUG nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 719.906455] env[62923]: DEBUG nova.virt.hardware [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 719.906693] env[62923]: DEBUG nova.virt.hardware [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 719.906840] env[62923]: DEBUG nova.virt.hardware [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 719.907022] env[62923]: DEBUG nova.virt.hardware [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 719.907169] env[62923]: DEBUG nova.virt.hardware [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 719.907316] env[62923]: DEBUG nova.virt.hardware [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 719.907513] env[62923]: DEBUG nova.virt.hardware [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 719.907652] env[62923]: DEBUG nova.virt.hardware [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 719.907807] env[62923]: DEBUG 
nova.virt.hardware [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 719.907964] env[62923]: DEBUG nova.virt.hardware [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 719.911081] env[62923]: DEBUG nova.virt.hardware [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 719.911081] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a5e4cc-8188-4ff0-9e6d-3969c37c2f95 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.922217] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173940c7-2e02-4dc7-a4b9-141f86025e20 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.123109] env[62923]: DEBUG nova.network.neutron [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.142053] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 720.142381] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Starting heal instance info cache {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 720.142602] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Rebuilding the list of instances to heal {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 720.213692] env[62923]: DEBUG nova.network.neutron [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.346426] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.498s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.346796] env[62923]: DEBUG nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 720.349545] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.235s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.394193] env[62923]: DEBUG nova.compute.manager [req-33c5285e-0029-4a3c-8c2a-3c653eaa1f1e req-9ca83648-fac7-42b4-86a0-1ad514c537c4 service nova] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Received event network-vif-deleted-0b11ddb2-dda0-428f-abd5-5dfdf1041aa6 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 720.436326] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquiring lock "98974fb7-049a-4c72-a352-bc0a50d2a879" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.436538] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "98974fb7-049a-4c72-a352-bc0a50d2a879" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.651795] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 720.652566] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 720.652566] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 720.652566] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 720.652566] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Skipping network cache update for instance because it is Building. 
{{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 720.652566] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 720.652566] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 720.652880] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: b48be393-189f-4093-b079-fe555192e7ed] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 720.652880] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Didn't find any instances for network info cache update. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 720.652996] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 720.653133] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 720.653275] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 720.653416] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 720.653551] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 720.653686] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 720.653806] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62923) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 720.653970] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 720.675477] env[62923]: ERROR nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1fac04a8-79b0-4aed-ac19-3a7c50287f9d, please check neutron logs for more information. [ 720.675477] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 720.675477] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 720.675477] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 720.675477] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 720.675477] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 720.675477] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 720.675477] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 720.675477] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 720.675477] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 720.675477] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 720.675477] env[62923]: ERROR nova.compute.manager raise self.value [ 720.675477] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 720.675477] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 720.675477] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 720.675477] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 720.676018] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 720.676018] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 720.676018] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1fac04a8-79b0-4aed-ac19-3a7c50287f9d, please check neutron logs for more information. 
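Note that each failure surfaces twice in the log: once from the ERROR logger inside _allocate_network_async, and once as a raw traceback printed via the eventlet hub. Network allocation runs in a greenthread, so the spawn path only hits the error later, when iterating network_info calls wait() on that greenthread and the stored exception is re-raised. A hedged sketch of that deferred-failure pattern, with allocate_network as an illustrative function rather than the Nova API:

    import eventlet

    class PortBindingFailed(Exception):
        pass

    def allocate_network(port_id):
        # Fails inside the greenthread, like _allocate_network_async above.
        raise PortBindingFailed("Binding failed for port %s" % port_id)

    gt = eventlet.spawn(allocate_network,
                        "1fac04a8-79b0-4aed-ac19-3a7c50287f9d")

    # The VM build proceeds; only when the VIF info is finally needed does
    # wait() switch to the greenthread and re-raise its exception here.
    try:
        gt.wait()
    except PortBindingFailed as exc:
        print("spawn failed late: %s" % exc)

This is why the log shows build_virtual_machine deep in the VMware driver as the frame that ultimately receives a Neutron port-binding error.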
[ 720.676018] env[62923]: ERROR nova.compute.manager [ 720.676018] env[62923]: Traceback (most recent call last): [ 720.676018] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 720.676018] env[62923]: listener.cb(fileno) [ 720.676018] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 720.676018] env[62923]: result = function(*args, **kwargs) [ 720.676018] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 720.676018] env[62923]: return func(*args, **kwargs) [ 720.676018] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 720.676018] env[62923]: raise e [ 720.676018] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 720.676018] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 720.676018] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 720.676018] env[62923]: created_port_ids = self._update_ports_for_instance( [ 720.676018] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 720.676018] env[62923]: with excutils.save_and_reraise_exception(): [ 720.676018] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 720.676018] env[62923]: self.force_reraise() [ 720.676018] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 720.676018] env[62923]: raise self.value [ 720.676018] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 720.676018] env[62923]: updated_port = self._update_port( [ 720.676018] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 720.676018] env[62923]: _ensure_no_port_binding_failure(port) [ 720.676018] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 720.676018] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 720.676826] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 1fac04a8-79b0-4aed-ac19-3a7c50287f9d, please check neutron logs for more information. [ 720.676826] env[62923]: Removing descriptor: 18 [ 720.676826] env[62923]: ERROR nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1fac04a8-79b0-4aed-ac19-3a7c50287f9d, please check neutron logs for more information. 
[ 720.676826] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Traceback (most recent call last): [ 720.676826] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 720.676826] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] yield resources [ 720.676826] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 720.676826] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] self.driver.spawn(context, instance, image_meta, [ 720.676826] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 720.676826] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 720.676826] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 720.676826] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] vm_ref = self.build_virtual_machine(instance, [ 720.677174] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 720.677174] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] vif_infos = vmwarevif.get_vif_info(self._session, [ 720.677174] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 720.677174] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] for vif in network_info: [ 720.677174] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 720.677174] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] return self._sync_wrapper(fn, *args, **kwargs) [ 720.677174] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 720.677174] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] self.wait() [ 720.677174] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 720.677174] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] self[:] = self._gt.wait() [ 720.677174] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 720.677174] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] return self._exit_event.wait() [ 720.677174] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 720.677524] env[62923]: ERROR 
nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] result = hub.switch() [ 720.677524] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 720.677524] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] return self.greenlet.switch() [ 720.677524] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 720.677524] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] result = function(*args, **kwargs) [ 720.677524] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 720.677524] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] return func(*args, **kwargs) [ 720.677524] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 720.677524] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] raise e [ 720.677524] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 720.677524] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] nwinfo = self.network_api.allocate_for_instance( [ 720.677524] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 720.677524] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] created_port_ids = self._update_ports_for_instance( [ 720.677870] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 720.677870] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] with excutils.save_and_reraise_exception(): [ 720.677870] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 720.677870] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] self.force_reraise() [ 720.677870] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 720.677870] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] raise self.value [ 720.677870] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 720.677870] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] updated_port = self._update_port( [ 720.677870] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 720.677870] 
env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] _ensure_no_port_binding_failure(port) [ 720.677870] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 720.677870] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] raise exception.PortBindingFailed(port_id=port['id']) [ 720.678197] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] nova.exception.PortBindingFailed: Binding failed for port 1fac04a8-79b0-4aed-ac19-3a7c50287f9d, please check neutron logs for more information. [ 720.678197] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] [ 720.678197] env[62923]: INFO nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Terminating instance [ 720.678777] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquiring lock "refresh_cache-f853c572-ad40-4cce-83d4-d5f11b42c37f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.678931] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquired lock "refresh_cache-f853c572-ad40-4cce-83d4-d5f11b42c37f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.679107] env[62923]: DEBUG nova.network.neutron [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 720.716375] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Releasing lock "refresh_cache-c370e9a9-3c09-418c-b2fc-e75323298518" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.716767] env[62923]: DEBUG nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 720.716997] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 720.717293] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e7ffea5-7637-4658-b13d-cde597c350ad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.727051] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c3bcada-8f75-4856-9da6-5c9e356ac71c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.747439] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c370e9a9-3c09-418c-b2fc-e75323298518 could not be found. [ 720.747590] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 720.748013] env[62923]: INFO nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Took 0.03 seconds to destroy the instance on the hypervisor. [ 720.748013] env[62923]: DEBUG oslo.service.loopingcall [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 720.748201] env[62923]: DEBUG nova.compute.manager [-] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 720.748265] env[62923]: DEBUG nova.network.neutron [-] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 720.767543] env[62923]: DEBUG nova.network.neutron [-] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.856148] env[62923]: DEBUG nova.compute.utils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 720.860205] env[62923]: DEBUG nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 720.860376] env[62923]: DEBUG nova.network.neutron [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 720.913657] env[62923]: DEBUG nova.policy [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7586849ffa63400e890616df1a5e8c59', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b5f5b3d8f59493683027a660f8b8274', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 721.156660] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.172371] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42bb7252-4439-4092-8c37-c1c581fa3c14 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.180054] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c979928e-5d82-4c6c-91b9-c4118adf37a6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.222479] env[62923]: DEBUG nova.network.neutron [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.227809] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b578e09-0e79-4aeb-a004-7633ff25ef59 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.235529] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26226a5c-1a11-457e-a6db-dcba5576a07b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.249234] env[62923]: DEBUG nova.compute.provider_tree [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.269674] env[62923]: DEBUG nova.network.neutron [-] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.282924] env[62923]: DEBUG nova.network.neutron [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Successfully created port: b47d0b6e-9eac-4506-b983-6bddfe19f795 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 721.334131] env[62923]: DEBUG nova.network.neutron [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.360906] env[62923]: DEBUG nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 721.752808] env[62923]: DEBUG nova.scheduler.client.report [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 721.772364] env[62923]: INFO nova.compute.manager [-] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Took 1.02 seconds to deallocate network for instance. 
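The "Inventory has not changed" record above carries the full Placement inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d. As a rough illustration (not Nova code), the allocatable capacity Placement derives from such a payload is (total - reserved) * allocation_ratio per resource class; the snippet below recomputes it from the values copied out of the log:

# Illustrative sketch: effective capacity implied by the logged inventory.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

With allocation_ratio=4.0, the 48 host VCPUs present to the scheduler as 192 allocatable units.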
[ 721.774459] env[62923]: DEBUG nova.compute.claims [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 721.774634] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.840022] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Releasing lock "refresh_cache-f853c572-ad40-4cce-83d4-d5f11b42c37f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.840022] env[62923]: DEBUG nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 721.840022] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 721.840022] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ea1548a-6a76-423f-a859-abcc0b27bef7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.848028] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb5db10-849c-4a0a-9f2e-f99c08b24e30 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.868333] env[62923]: INFO nova.virt.block_device [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Booting with volume aa425a3d-1cae-44c2-aaae-1815c2ed4319 at /dev/sda [ 721.875317] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f853c572-ad40-4cce-83d4-d5f11b42c37f could not be found. 
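The lockutils records above (Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim", later released with a held time) are produced by oslo.concurrency. A minimal sketch of that pattern, assuming a placeholder function body rather than the real resource-tracker logic:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # Body runs with the "compute_resources" lock held; lockutils itself
    # emits the "acquired ... waited Ns" / "released ... held Ns" lines.
    print('aborting claim for %s' % instance_uuid)

abort_instance_claim('c370e9a9-3c09-418c-b2fc-e75323298518')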
[ 721.875548] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 721.875719] env[62923]: INFO nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 721.875958] env[62923]: DEBUG oslo.service.loopingcall [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 721.876178] env[62923]: DEBUG nova.compute.manager [-] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 721.876328] env[62923]: DEBUG nova.network.neutron [-] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 721.905762] env[62923]: DEBUG nova.network.neutron [-] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.919519] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87a6ac3c-33a2-4bfd-923e-0d8ef107a7fc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.934708] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24be99b-6e85-40ee-b354-03ad24ff0946 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.956075] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e0856374-dc5f-4bcb-af01-8d6e31b4f4c0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.964894] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd4edb7-53ed-49b6-9111-09ce34fc8fd5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.987222] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8faeeb43-bc95-41b9-b3fa-46ae9fff0939 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.994158] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583dfe97-9ada-4fc5-b4e0-9e9807e26e67 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.007738] env[62923]: DEBUG nova.virt.block_device [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 
tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Updating existing volume attachment record: 537a752a-3503-43a5-bf65-09055811adda {{(pid=62923) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 722.259061] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.909s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.259440] env[62923]: ERROR nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 90939492-8b35-4150-bcd7-213d46d845ef, please check neutron logs for more information. [ 722.259440] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Traceback (most recent call last): [ 722.259440] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 722.259440] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] self.driver.spawn(context, instance, image_meta, [ 722.259440] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 722.259440] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 722.259440] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 722.259440] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] vm_ref = self.build_virtual_machine(instance, [ 722.259440] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 722.259440] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] vif_infos = vmwarevif.get_vif_info(self._session, [ 722.259440] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 722.259837] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] for vif in network_info: [ 722.259837] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 722.259837] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] return self._sync_wrapper(fn, *args, **kwargs) [ 722.259837] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 722.259837] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] 
self.wait() [ 722.259837] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 722.259837] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] self[:] = self._gt.wait() [ 722.259837] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 722.259837] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] return self._exit_event.wait() [ 722.259837] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 722.259837] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] result = hub.switch() [ 722.259837] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 722.259837] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] return self.greenlet.switch() [ 722.260223] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 722.260223] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] result = function(*args, **kwargs) [ 722.260223] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 722.260223] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] return func(*args, **kwargs) [ 722.260223] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 722.260223] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] raise e [ 722.260223] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 722.260223] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] nwinfo = self.network_api.allocate_for_instance( [ 722.260223] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 722.260223] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] created_port_ids = self._update_ports_for_instance( [ 722.260223] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 722.260223] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] with excutils.save_and_reraise_exception(): [ 722.260223] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.260584] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] 
self.force_reraise() [ 722.260584] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.260584] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] raise self.value [ 722.260584] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 722.260584] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] updated_port = self._update_port( [ 722.260584] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.260584] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] _ensure_no_port_binding_failure(port) [ 722.260584] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.260584] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] raise exception.PortBindingFailed(port_id=port['id']) [ 722.260584] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] nova.exception.PortBindingFailed: Binding failed for port 90939492-8b35-4150-bcd7-213d46d845ef, please check neutron logs for more information. [ 722.260584] env[62923]: ERROR nova.compute.manager [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] [ 722.260949] env[62923]: DEBUG nova.compute.utils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Binding failed for port 90939492-8b35-4150-bcd7-213d46d845ef, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 722.261913] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.516s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.264188] env[62923]: DEBUG nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Build of instance 81c87881-bf63-4622-a0cb-6e38680a8f14 was re-scheduled: Binding failed for port 90939492-8b35-4150-bcd7-213d46d845ef, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 722.265152] env[62923]: DEBUG nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 722.265383] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Acquiring lock "refresh_cache-81c87881-bf63-4622-a0cb-6e38680a8f14" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.265528] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Acquired lock "refresh_cache-81c87881-bf63-4622-a0cb-6e38680a8f14" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.265683] env[62923]: DEBUG nova.network.neutron [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 722.415019] env[62923]: DEBUG nova.network.neutron [-] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.424241] env[62923]: DEBUG nova.compute.manager [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Received event network-changed-1fac04a8-79b0-4aed-ac19-3a7c50287f9d {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 722.424432] env[62923]: DEBUG nova.compute.manager [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Refreshing instance network info cache due to event network-changed-1fac04a8-79b0-4aed-ac19-3a7c50287f9d. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 722.424632] env[62923]: DEBUG oslo_concurrency.lockutils [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] Acquiring lock "refresh_cache-f853c572-ad40-4cce-83d4-d5f11b42c37f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.424769] env[62923]: DEBUG oslo_concurrency.lockutils [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] Acquired lock "refresh_cache-f853c572-ad40-4cce-83d4-d5f11b42c37f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.424948] env[62923]: DEBUG nova.network.neutron [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Refreshing network info cache for port 1fac04a8-79b0-4aed-ac19-3a7c50287f9d {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 722.444490] env[62923]: ERROR nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b47d0b6e-9eac-4506-b983-6bddfe19f795, please check neutron logs for more information. [ 722.444490] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 722.444490] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 722.444490] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 722.444490] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 722.444490] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 722.444490] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 722.444490] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 722.444490] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.444490] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 722.444490] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.444490] env[62923]: ERROR nova.compute.manager raise self.value [ 722.444490] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 722.444490] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 722.444490] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.444490] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 722.445164] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.445164] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 722.445164] env[62923]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port b47d0b6e-9eac-4506-b983-6bddfe19f795, please check neutron logs for more information. [ 722.445164] env[62923]: ERROR nova.compute.manager [ 722.445164] env[62923]: Traceback (most recent call last): [ 722.445164] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 722.445164] env[62923]: listener.cb(fileno) [ 722.445164] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 722.445164] env[62923]: result = function(*args, **kwargs) [ 722.445164] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 722.445164] env[62923]: return func(*args, **kwargs) [ 722.445164] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 722.445164] env[62923]: raise e [ 722.445164] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 722.445164] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 722.445164] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 722.445164] env[62923]: created_port_ids = self._update_ports_for_instance( [ 722.445164] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 722.445164] env[62923]: with excutils.save_and_reraise_exception(): [ 722.445164] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.445164] env[62923]: self.force_reraise() [ 722.445164] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.445164] env[62923]: raise self.value [ 722.445164] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 722.445164] env[62923]: updated_port = self._update_port( [ 722.445164] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.445164] env[62923]: _ensure_no_port_binding_failure(port) [ 722.445164] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.445164] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 722.446126] env[62923]: nova.exception.PortBindingFailed: Binding failed for port b47d0b6e-9eac-4506-b983-6bddfe19f795, please check neutron logs for more information. [ 722.446126] env[62923]: Removing descriptor: 18 [ 722.787688] env[62923]: DEBUG nova.network.neutron [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.881312] env[62923]: DEBUG nova.network.neutron [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.915381] env[62923]: INFO nova.compute.manager [-] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Took 1.04 seconds to deallocate network for instance. 
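Every PortBindingFailed traceback in this log bottoms out in _ensure_no_port_binding_failure at nova/network/neutron.py:294. Based on those frames, the check amounts to the following sketch; the 'binding_failed' vif_type value is an assumption consistent with nova.network.model, not a verbatim copy of the upstream helper:

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check '
                         'neutron logs for more information.' % port_id)

def _ensure_no_port_binding_failure(port):
    # Neutron marks a port that no mechanism driver could bind with
    # binding:vif_type = 'binding_failed'; Nova turns that into the
    # exception seen throughout this log.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

_ensure_no_port_binding_failure(
    {'id': 'b47d0b6e-9eac-4506-b983-6bddfe19f795',
     'binding:vif_type': 'binding_failed'})  # raises PortBindingFailed

The compute manager treats such a build as unrecoverable on this host and either terminates or re-schedules the instance, which is the pattern repeating above.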
[ 722.917739] env[62923]: DEBUG nova.compute.claims [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 722.917922] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.093627] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631d828c-2f78-4585-8996-270a2b3d0a35 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.104825] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e6153c-44b9-4dfb-84f4-c294edb17221 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.112426] env[62923]: DEBUG nova.network.neutron [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.145398] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54329a16-3835-4972-a561-0b573867917b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.153976] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e0a98c-4bf9-4117-b3b9-9852346898f1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.168661] env[62923]: DEBUG nova.compute.provider_tree [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.210028] env[62923]: DEBUG nova.network.neutron [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.384048] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Releasing lock "refresh_cache-81c87881-bf63-4622-a0cb-6e38680a8f14" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.384349] env[62923]: DEBUG nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Virt driver does not provide unplug_vifs method, so it 
is not possible to determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 723.384533] env[62923]: DEBUG nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 723.384700] env[62923]: DEBUG nova.network.neutron [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 723.401071] env[62923]: DEBUG nova.network.neutron [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.672246] env[62923]: DEBUG nova.scheduler.client.report [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 723.712873] env[62923]: DEBUG oslo_concurrency.lockutils [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] Releasing lock "refresh_cache-f853c572-ad40-4cce-83d4-d5f11b42c37f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.713161] env[62923]: DEBUG nova.compute.manager [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Received event network-vif-deleted-1fac04a8-79b0-4aed-ac19-3a7c50287f9d {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 723.713343] env[62923]: DEBUG nova.compute.manager [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] [instance: b48be393-189f-4093-b079-fe555192e7ed] Received event network-changed-b47d0b6e-9eac-4506-b983-6bddfe19f795 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 723.713500] env[62923]: DEBUG nova.compute.manager [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] [instance: b48be393-189f-4093-b079-fe555192e7ed] Refreshing instance network info cache due to event network-changed-b47d0b6e-9eac-4506-b983-6bddfe19f795. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 723.713697] env[62923]: DEBUG oslo_concurrency.lockutils [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] Acquiring lock "refresh_cache-b48be393-189f-4093-b079-fe555192e7ed" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.713836] env[62923]: DEBUG oslo_concurrency.lockutils [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] Acquired lock "refresh_cache-b48be393-189f-4093-b079-fe555192e7ed" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.714054] env[62923]: DEBUG nova.network.neutron [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] [instance: b48be393-189f-4093-b079-fe555192e7ed] Refreshing network info cache for port b47d0b6e-9eac-4506-b983-6bddfe19f795 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 723.903242] env[62923]: DEBUG nova.network.neutron [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.153497] env[62923]: DEBUG nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 724.154156] env[62923]: DEBUG nova.virt.hardware [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 724.154374] env[62923]: DEBUG nova.virt.hardware [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 724.154524] env[62923]: DEBUG nova.virt.hardware [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.154698] env[62923]: DEBUG nova.virt.hardware [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 724.154842] env[62923]: DEBUG nova.virt.hardware [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.154983] env[62923]: DEBUG nova.virt.hardware [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 724.155201] env[62923]: DEBUG nova.virt.hardware [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 724.155355] env[62923]: DEBUG nova.virt.hardware [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 724.156028] env[62923]: DEBUG nova.virt.hardware [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Got 1 possible 
topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 724.156028] env[62923]: DEBUG nova.virt.hardware [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 724.156028] env[62923]: DEBUG nova.virt.hardware [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 724.156742] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca939e8-531d-40ab-bc0d-c8d8ad4b80bc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.166393] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20104f95-aa54-4042-a5af-e80d6fae10c9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.180372] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.919s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.181046] env[62923]: ERROR nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 43314fbb-c56a-40d4-be53-83e2ff602344, please check neutron logs for more information. 
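The nova.virt.hardware records above walk from flavor/image limits (0:0:0, i.e. unconstrained) to "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" for the 1-vCPU m1.nano flavor. A toy enumeration of that step, not the upstream algorithm: any sockets*cores*threads factorisation of the vCPU count within the 65536 default maxima qualifies.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Yield (sockets, cores, threads) triples whose product is vcpus.
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the log

The traceback for port 43314fbb-c56a-40d4-be53-83e2ff602344 continues below.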
[ 724.181046] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Traceback (most recent call last): [ 724.181046] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 724.181046] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] self.driver.spawn(context, instance, image_meta, [ 724.181046] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 724.181046] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] self._vmops.spawn(context, instance, image_meta, injected_files, [ 724.181046] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 724.181046] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] vm_ref = self.build_virtual_machine(instance, [ 724.181046] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 724.181046] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] vif_infos = vmwarevif.get_vif_info(self._session, [ 724.181046] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 724.181490] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] for vif in network_info: [ 724.181490] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 724.181490] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] return self._sync_wrapper(fn, *args, **kwargs) [ 724.181490] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 724.181490] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] self.wait() [ 724.181490] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 724.181490] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] self[:] = self._gt.wait() [ 724.181490] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 724.181490] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] return self._exit_event.wait() [ 724.181490] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 724.181490] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] result = hub.switch() [ 724.181490] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
724.181490] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] return self.greenlet.switch() [ 724.181960] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 724.181960] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] result = function(*args, **kwargs) [ 724.181960] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 724.181960] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] return func(*args, **kwargs) [ 724.181960] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 724.181960] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] raise e [ 724.181960] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 724.181960] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] nwinfo = self.network_api.allocate_for_instance( [ 724.181960] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 724.181960] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] created_port_ids = self._update_ports_for_instance( [ 724.181960] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 724.181960] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] with excutils.save_and_reraise_exception(): [ 724.181960] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.182356] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] self.force_reraise() [ 724.182356] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.182356] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] raise self.value [ 724.182356] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 724.182356] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] updated_port = self._update_port( [ 724.182356] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.182356] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] _ensure_no_port_binding_failure(port) [ 724.182356] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 724.182356] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] raise exception.PortBindingFailed(port_id=port['id']) [ 724.182356] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] nova.exception.PortBindingFailed: Binding failed for port 43314fbb-c56a-40d4-be53-83e2ff602344, please check neutron logs for more information. [ 724.182356] env[62923]: ERROR nova.compute.manager [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] [ 724.182701] env[62923]: DEBUG nova.compute.utils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Binding failed for port 43314fbb-c56a-40d4-be53-83e2ff602344, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 724.183411] env[62923]: ERROR nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b47d0b6e-9eac-4506-b983-6bddfe19f795, please check neutron logs for more information. [ 724.183411] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] Traceback (most recent call last): [ 724.183411] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 724.183411] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] yield resources [ 724.183411] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 724.183411] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] self.driver.spawn(context, instance, image_meta, [ 724.183411] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 724.183411] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 724.183411] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 724.183411] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] vm_ref = self.build_virtual_machine(instance, [ 724.183411] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 724.183804] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] vif_infos = vmwarevif.get_vif_info(self._session, [ 724.183804] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 724.183804] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] for vif in network_info: [ 724.183804] env[62923]: ERROR nova.compute.manager [instance: 
b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 724.183804] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] return self._sync_wrapper(fn, *args, **kwargs) [ 724.183804] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 724.183804] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] self.wait() [ 724.183804] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 724.183804] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] self[:] = self._gt.wait() [ 724.183804] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 724.183804] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] return self._exit_event.wait() [ 724.183804] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 724.183804] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] current.throw(*self._exc) [ 724.184316] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 724.184316] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] result = function(*args, **kwargs) [ 724.184316] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 724.184316] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] return func(*args, **kwargs) [ 724.184316] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 724.184316] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] raise e [ 724.184316] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 724.184316] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] nwinfo = self.network_api.allocate_for_instance( [ 724.184316] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 724.184316] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] created_port_ids = self._update_ports_for_instance( [ 724.184316] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 724.184316] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] with excutils.save_and_reraise_exception(): [ 724.184316] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.184737] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] self.force_reraise() [ 724.184737] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.184737] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] raise self.value [ 724.184737] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 724.184737] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] updated_port = self._update_port( [ 724.184737] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.184737] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] _ensure_no_port_binding_failure(port) [ 724.184737] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.184737] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] raise exception.PortBindingFailed(port_id=port['id']) [ 724.184737] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] nova.exception.PortBindingFailed: Binding failed for port b47d0b6e-9eac-4506-b983-6bddfe19f795, please check neutron logs for more information. [ 724.184737] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] [ 724.184737] env[62923]: INFO nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Terminating instance [ 724.186200] env[62923]: DEBUG nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Build of instance d45fe9ea-8538-47da-b8dd-c67f8863a812 was re-scheduled: Binding failed for port 43314fbb-c56a-40d4-be53-83e2ff602344, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 724.186598] env[62923]: DEBUG nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 724.186889] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "refresh_cache-d45fe9ea-8538-47da-b8dd-c67f8863a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.187083] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquired lock "refresh_cache-d45fe9ea-8538-47da-b8dd-c67f8863a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.187253] env[62923]: DEBUG nova.network.neutron [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.188177] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.531s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.199157] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Acquiring lock "refresh_cache-b48be393-189f-4093-b079-fe555192e7ed" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.241168] env[62923]: DEBUG nova.network.neutron [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] [instance: b48be393-189f-4093-b079-fe555192e7ed] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 724.343379] env[62923]: DEBUG nova.network.neutron [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] [instance: b48be393-189f-4093-b079-fe555192e7ed] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.406127] env[62923]: INFO nova.compute.manager [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] [instance: 81c87881-bf63-4622-a0cb-6e38680a8f14] Took 1.02 seconds to deallocate network for instance. 
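The PortBindingFailed tracebacks above all bottom out in the same check: Neutron has marked the port's binding as failed, and _ensure_no_port_binding_failure (nova/network/neutron.py:294 in the tracebacks, called from _update_port at line 585) converts that into a hard error, which aborts the spawn and produces the "Build of instance ... was re-scheduled" records in this log. Below is a minimal standalone sketch of that check; the function and exception names come from the tracebacks, while the 'binding_failed' vif_type value, the stand-in exception class, and the message format are assumptions inferred from the logged error text, not a verbatim copy of Nova's code.

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            # Message format mirrors the error text logged above.
            super().__init__("Binding failed for port %s, please check "
                             "neutron logs for more information." % port_id)

    # Assumed marker Neutron puts on a port whose binding could not be
    # completed (not shown directly in this log).
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def _ensure_no_port_binding_failure(port):
        # Approximation of the check at nova/network/neutron.py:294: a port
        # that failed to bind is treated as a hard error, never ignored.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # A port payload shaped like the one behind the first traceback above.
    port = {'id': '43314fbb-c56a-40d4-be53-83e2ff602344',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)

Because the exception is raised inside the _allocate_network_async greenthread, it only surfaces once the spawn path iterates network_info, which is why the spawn-side tracebacks above pass through model.py's _sync_wrapper and wait before reaching the real failure in the network allocation code.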
[ 724.446796] env[62923]: DEBUG nova.compute.manager [req-7786cf28-6cc8-4760-8747-d0c79f2d29f5 req-66e60f59-4ec6-4f08-8594-5c562061be70 service nova] [instance: b48be393-189f-4093-b079-fe555192e7ed] Received event network-vif-deleted-b47d0b6e-9eac-4506-b983-6bddfe19f795 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 724.707798] env[62923]: DEBUG nova.network.neutron [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 724.782155] env[62923]: DEBUG nova.network.neutron [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.846160] env[62923]: DEBUG oslo_concurrency.lockutils [req-9c054ebd-c544-4058-b73f-690e933ef170 req-145a32b4-db46-4c1a-959c-f08326a12e11 service nova] Releasing lock "refresh_cache-b48be393-189f-4093-b079-fe555192e7ed" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.846529] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Acquired lock "refresh_cache-b48be393-189f-4093-b079-fe555192e7ed" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.847068] env[62923]: DEBUG nova.network.neutron [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.979675] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13b47b5-9f87-43a8-a613-4742a21bcd6f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.987364] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30d59bb-2e74-46c9-86de-e8120387c970 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.018305] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be59cf5-7656-4c9a-b0c7-55e607ae93cc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.025810] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec0cdd9-540f-4755-97a1-78294a7fa6e5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.038656] env[62923]: DEBUG nova.compute.provider_tree [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Inventory has not changed in ProviderTree 
for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.285473] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Releasing lock "refresh_cache-d45fe9ea-8538-47da-b8dd-c67f8863a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.285746] env[62923]: DEBUG nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 725.285901] env[62923]: DEBUG nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 725.286077] env[62923]: DEBUG nova.network.neutron [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 725.310173] env[62923]: DEBUG nova.network.neutron [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.366599] env[62923]: DEBUG nova.network.neutron [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.432952] env[62923]: INFO nova.scheduler.client.report [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Deleted allocations for instance 81c87881-bf63-4622-a0cb-6e38680a8f14 [ 725.482669] env[62923]: DEBUG nova.network.neutron [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.542135] env[62923]: DEBUG nova.scheduler.client.report [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 725.812795] env[62923]: DEBUG nova.network.neutron [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.940215] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44b5e2ad-d4e7-42a6-90de-003c4add526c tempest-InstanceActionsNegativeTestJSON-588136524 tempest-InstanceActionsNegativeTestJSON-588136524-project-member] Lock "81c87881-bf63-4622-a0cb-6e38680a8f14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.484s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.984928] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Releasing lock "refresh_cache-b48be393-189f-4093-b079-fe555192e7ed" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.985536] env[62923]: DEBUG nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 725.985840] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9713c0d1-ec9e-4912-881b-5c431d8ccd27 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.994432] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0796e200-1496-4955-8c6a-e721c701b691 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.015125] env[62923]: WARNING nova.virt.vmwareapi.driver [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Instance does not exist. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance b48be393-189f-4093-b079-fe555192e7ed could not be found. [ 726.015334] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 726.015854] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e57be922-3e02-46b7-894d-49efe2217d21 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.022949] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6108a04a-1161-4722-bc6f-0777ab8f132e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.043954] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b48be393-189f-4093-b079-fe555192e7ed could not be found. [ 726.044171] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 726.044361] env[62923]: INFO nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Took 0.06 seconds to destroy the instance on the hypervisor. [ 726.045460] env[62923]: DEBUG oslo.service.loopingcall [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 726.045460] env[62923]: DEBUG nova.compute.manager [-] [instance: b48be393-189f-4093-b079-fe555192e7ed] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 726.045460] env[62923]: DEBUG nova.network.neutron [-] [instance: b48be393-189f-4093-b079-fe555192e7ed] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 726.049239] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.861s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.049800] env[62923]: ERROR nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5596e201-05dc-4c8d-8217-4f72482468cf, please check neutron logs for more information. [ 726.049800] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Traceback (most recent call last): [ 726.049800] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 726.049800] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] self.driver.spawn(context, instance, image_meta, [ 726.049800] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 726.049800] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] self._vmops.spawn(context, instance, image_meta, injected_files, [ 726.049800] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 726.049800] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] vm_ref = self.build_virtual_machine(instance, [ 726.049800] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 726.049800] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] vif_infos = vmwarevif.get_vif_info(self._session, [ 726.049800] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 726.050227] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] for vif in network_info: [ 726.050227] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 726.050227] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] return self._sync_wrapper(fn, *args, **kwargs) [ 726.050227] env[62923]: 
ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 726.050227] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] self.wait() [ 726.050227] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 726.050227] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] self[:] = self._gt.wait() [ 726.050227] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 726.050227] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] return self._exit_event.wait() [ 726.050227] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 726.050227] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] result = hub.switch() [ 726.050227] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 726.050227] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] return self.greenlet.switch() [ 726.050732] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 726.050732] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] result = function(*args, **kwargs) [ 726.050732] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 726.050732] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] return func(*args, **kwargs) [ 726.050732] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 726.050732] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] raise e [ 726.050732] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 726.050732] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] nwinfo = self.network_api.allocate_for_instance( [ 726.050732] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 726.050732] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] created_port_ids = self._update_ports_for_instance( [ 726.050732] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 726.050732] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] with excutils.save_and_reraise_exception(): [ 726.050732] env[62923]: ERROR nova.compute.manager 
[instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 726.051242] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] self.force_reraise() [ 726.051242] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 726.051242] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] raise self.value [ 726.051242] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 726.051242] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] updated_port = self._update_port( [ 726.051242] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 726.051242] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] _ensure_no_port_binding_failure(port) [ 726.051242] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 726.051242] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] raise exception.PortBindingFailed(port_id=port['id']) [ 726.051242] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] nova.exception.PortBindingFailed: Binding failed for port 5596e201-05dc-4c8d-8217-4f72482468cf, please check neutron logs for more information. [ 726.051242] env[62923]: ERROR nova.compute.manager [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] [ 726.051593] env[62923]: DEBUG nova.compute.utils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Binding failed for port 5596e201-05dc-4c8d-8217-4f72482468cf, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 726.051593] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.763s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.055875] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Build of instance 325e8102-c129-40f4-b61d-1976d2a1fe42 was re-scheduled: Binding failed for port 5596e201-05dc-4c8d-8217-4f72482468cf, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 726.056362] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 726.056543] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "refresh_cache-325e8102-c129-40f4-b61d-1976d2a1fe42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.056691] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquired lock "refresh_cache-325e8102-c129-40f4-b61d-1976d2a1fe42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.056840] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 726.059639] env[62923]: DEBUG nova.network.neutron [-] [instance: b48be393-189f-4093-b079-fe555192e7ed] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.316703] env[62923]: INFO nova.compute.manager [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: d45fe9ea-8538-47da-b8dd-c67f8863a812] Took 1.03 seconds to deallocate network for instance. [ 726.443187] env[62923]: DEBUG nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 726.562268] env[62923]: DEBUG nova.network.neutron [-] [instance: b48be393-189f-4093-b079-fe555192e7ed] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.582621] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.752288] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.883475] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417f1928-d91e-49bc-9159-32615cd4a5e5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.893017] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d6af70-7a97-4499-a060-1c2d65963da8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.922661] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e53b2ce-d842-4f34-befc-29ebb2cfceac {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.930580] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33776ed3-f95e-40fb-8032-6d0828b27823 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.944929] env[62923]: DEBUG nova.compute.provider_tree [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.960818] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.065206] env[62923]: INFO nova.compute.manager [-] [instance: b48be393-189f-4093-b079-fe555192e7ed] Took 1.02 seconds to deallocate network for instance. [ 727.255274] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Releasing lock "refresh_cache-325e8102-c129-40f4-b61d-1976d2a1fe42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.255528] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 727.255710] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 727.255871] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 727.273119] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.366668] env[62923]: INFO nova.scheduler.client.report [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Deleted allocations for instance d45fe9ea-8538-47da-b8dd-c67f8863a812 [ 727.448705] env[62923]: DEBUG nova.scheduler.client.report [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 727.636751] env[62923]: INFO nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Took 0.57 seconds to detach 1 volume for instance. 
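The "Inventory has not changed" records above carry the full Placement inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d. As a reading aid, here is a short sketch of the schedulable capacity implied by that payload, assuming the usual Placement formula capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation; the inventory dict below is copied from the log record.

    # Inventory payload as logged for provider a513b783-544c-421b-85ec-cfd6d6ee698d.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Assumed capacity formula: overcommit applies to what remains
        # after the host reservation is taken out.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: schedulable=%g, single-allocation cap=%d'
              % (rc, capacity, inv['max_unit']))
    # VCPU: schedulable=192, single-allocation cap=16
    # MEMORY_MB: schedulable=196078, single-allocation cap=65530
    # DISK_GB: schedulable=400, single-allocation cap=148

With allocation_ratio 4.0, the 48 physical VCPUs report as 192 schedulable units, which is the pool the resource tracker's instance_claim and abort_instance_claim calls (serialized under the "compute_resources" lock in the surrounding records) draw from and return to.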
[ 727.639015] env[62923]: DEBUG nova.compute.claims [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 727.639157] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.775408] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.876514] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b169f017-896e-43d7-92ca-0ca9a3ad50e0 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "d45fe9ea-8538-47da-b8dd-c67f8863a812" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.719s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.954938] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.903s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.955697] env[62923]: ERROR nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a862aa45-f5eb-449d-817a-ed70359fc14c, please check neutron logs for more information. 
[ 727.955697] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Traceback (most recent call last): [ 727.955697] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 727.955697] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] self.driver.spawn(context, instance, image_meta, [ 727.955697] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 727.955697] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] self._vmops.spawn(context, instance, image_meta, injected_files, [ 727.955697] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 727.955697] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] vm_ref = self.build_virtual_machine(instance, [ 727.955697] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 727.955697] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] vif_infos = vmwarevif.get_vif_info(self._session, [ 727.955697] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 727.956050] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] for vif in network_info: [ 727.956050] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 727.956050] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] return self._sync_wrapper(fn, *args, **kwargs) [ 727.956050] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 727.956050] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] self.wait() [ 727.956050] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 727.956050] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] self[:] = self._gt.wait() [ 727.956050] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 727.956050] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] return self._exit_event.wait() [ 727.956050] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 727.956050] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] result = hub.switch() [ 727.956050] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
727.956050] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] return self.greenlet.switch() [ 727.956501] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.956501] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] result = function(*args, **kwargs) [ 727.956501] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 727.956501] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] return func(*args, **kwargs) [ 727.956501] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 727.956501] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] raise e [ 727.956501] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.956501] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] nwinfo = self.network_api.allocate_for_instance( [ 727.956501] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.956501] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] created_port_ids = self._update_ports_for_instance( [ 727.956501] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 727.956501] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] with excutils.save_and_reraise_exception(): [ 727.956501] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.956861] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] self.force_reraise() [ 727.956861] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.956861] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] raise self.value [ 727.956861] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.956861] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] updated_port = self._update_port( [ 727.956861] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.956861] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] _ensure_no_port_binding_failure(port) [ 727.956861] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 727.956861] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] raise exception.PortBindingFailed(port_id=port['id']) [ 727.956861] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] nova.exception.PortBindingFailed: Binding failed for port a862aa45-f5eb-449d-817a-ed70359fc14c, please check neutron logs for more information. [ 727.956861] env[62923]: ERROR nova.compute.manager [instance: fa7295fe-b893-455b-9d4b-4013c187c288] [ 727.957169] env[62923]: DEBUG nova.compute.utils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Binding failed for port a862aa45-f5eb-449d-817a-ed70359fc14c, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 727.958368] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.524s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.961059] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Build of instance fa7295fe-b893-455b-9d4b-4013c187c288 was re-scheduled: Binding failed for port a862aa45-f5eb-449d-817a-ed70359fc14c, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 727.961507] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 727.961725] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "refresh_cache-fa7295fe-b893-455b-9d4b-4013c187c288" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.961867] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquired lock "refresh_cache-fa7295fe-b893-455b-9d4b-4013c187c288" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.962053] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 728.281105] env[62923]: INFO nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 325e8102-c129-40f4-b61d-1976d2a1fe42] Took 1.02 seconds to deallocate network for instance. [ 728.379630] env[62923]: DEBUG nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 728.495193] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.711289] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.843671] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-672fa68e-2962-4d61-8f5c-37ff4e8e927e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.852144] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45fffb21-4cb3-4b82-878d-678833d37934 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.884126] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2af968-ecc3-4436-93ac-451b00120cc5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.895112] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104fbc10-0436-4e17-91d0-8713c19eb5a1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.909184] env[62923]: DEBUG nova.compute.provider_tree [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.911221] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.217730] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Releasing lock "refresh_cache-fa7295fe-b893-455b-9d4b-4013c187c288" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.217730] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 729.217730] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 729.217864] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 729.241396] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.319507] env[62923]: INFO nova.scheduler.client.report [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Deleted allocations for instance 325e8102-c129-40f4-b61d-1976d2a1fe42 [ 729.414937] env[62923]: DEBUG nova.scheduler.client.report [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 729.745040] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.830232] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "325e8102-c129-40f4-b61d-1976d2a1fe42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.855s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.923183] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.962s {{(pid=62923) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.923183] env[62923]: ERROR nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 591b4e58-ab5d-4aac-8e11-df8af19f43eb, please check neutron logs for more information. [ 729.923183] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Traceback (most recent call last): [ 729.923183] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 729.923183] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] self.driver.spawn(context, instance, image_meta, [ 729.923183] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 729.923183] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 729.923183] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 729.923183] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] vm_ref = self.build_virtual_machine(instance, [ 729.923680] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 729.923680] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] vif_infos = vmwarevif.get_vif_info(self._session, [ 729.923680] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 729.923680] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] for vif in network_info: [ 729.923680] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 729.923680] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] return self._sync_wrapper(fn, *args, **kwargs) [ 729.923680] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 729.923680] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] self.wait() [ 729.923680] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 729.923680] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] self[:] = self._gt.wait() [ 729.923680] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 729.923680] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] return self._exit_event.wait() [ 
729.923680] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 729.924063] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] result = hub.switch() [ 729.924063] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 729.924063] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] return self.greenlet.switch() [ 729.924063] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 729.924063] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] result = function(*args, **kwargs) [ 729.924063] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 729.924063] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] return func(*args, **kwargs) [ 729.924063] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 729.924063] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] raise e [ 729.924063] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 729.924063] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] nwinfo = self.network_api.allocate_for_instance( [ 729.924063] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 729.924063] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] created_port_ids = self._update_ports_for_instance( [ 729.924418] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 729.924418] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] with excutils.save_and_reraise_exception(): [ 729.924418] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.924418] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] self.force_reraise() [ 729.924418] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.924418] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] raise self.value [ 729.924418] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 729.924418] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] 
updated_port = self._update_port( [ 729.924418] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.924418] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] _ensure_no_port_binding_failure(port) [ 729.924418] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.924418] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] raise exception.PortBindingFailed(port_id=port['id']) [ 729.924742] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] nova.exception.PortBindingFailed: Binding failed for port 591b4e58-ab5d-4aac-8e11-df8af19f43eb, please check neutron logs for more information. [ 729.924742] env[62923]: ERROR nova.compute.manager [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] [ 729.924742] env[62923]: DEBUG nova.compute.utils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Binding failed for port 591b4e58-ab5d-4aac-8e11-df8af19f43eb, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 729.924742] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.173s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.930176] env[62923]: INFO nova.compute.claims [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 729.935311] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Build of instance 627ebcab-90f9-4ebe-baf9-52fe808ec8c6 was re-scheduled: Binding failed for port 591b4e58-ab5d-4aac-8e11-df8af19f43eb, please check neutron logs for more information. 
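Both tracebacks in this section funnel through oslo_utils.excutils.save_and_reraise_exception, which is why the __exit__ and force_reraise frames appear between the failing Neutron call and the re-raised PortBindingFailed. A minimal sketch of that pattern follows; only the excutils API is real, and _update_port/update_ports here are hypothetical stand-ins for the Nova code in the traceback:

    from oslo_utils import excutils

    def _update_port(port_id):
        # Hypothetical stand-in for the Neutron port update failing above.
        raise RuntimeError("binding failed for %s" % port_id)

    def update_ports(port_ids):
        created = []
        for port_id in port_ids:
            try:
                created.append(_update_port(port_id))
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup runs inside the block; on exit, __exit__ calls
                    # force_reraise(), which raises the saved exception --
                    # producing exactly the frame sequence logged above.
                    created.clear()
        return created

The point of the helper is that the original exception is preserved and re-raised after the cleanup runs (and is at least logged if the cleanup itself raises), so the PortBindingFailed, rather than some secondary error, is what gets recorded against the instance.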
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 729.935829] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 729.936079] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquiring lock "refresh_cache-627ebcab-90f9-4ebe-baf9-52fe808ec8c6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.936222] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Acquired lock "refresh_cache-627ebcab-90f9-4ebe-baf9-52fe808ec8c6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.936415] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 730.247599] env[62923]: INFO nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: fa7295fe-b893-455b-9d4b-4013c187c288] Took 1.03 seconds to deallocate network for instance. [ 730.333705] env[62923]: DEBUG nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 730.465086] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.544179] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.863999] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.049179] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Releasing lock "refresh_cache-627ebcab-90f9-4ebe-baf9-52fe808ec8c6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.049179] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 731.049179] env[62923]: DEBUG nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 731.049801] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 731.081330] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.167701] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.167928] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.285675] env[62923]: INFO nova.scheduler.client.report [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Deleted allocations for instance fa7295fe-b893-455b-9d4b-4013c187c288 [ 731.340072] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669fbdb8-c3ad-476c-bea4-6610dc7461cd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.346428] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f8ed08-9e7a-446c-8017-e59a7ab4b054 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.380113] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3ce349-9ef9-48da-86cb-83ac3542c070 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.388996] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f607a9a-d8a3-4319-816d-1bcda833011f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.404382] env[62923]: DEBUG nova.compute.provider_tree [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.553211] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Acquiring lock "6fa4d8a8-093f-4ae8-9148-f15f5bf98944" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.553483] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Lock "6fa4d8a8-093f-4ae8-9148-f15f5bf98944" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.583857] env[62923]: DEBUG nova.network.neutron [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.800121] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "fa7295fe-b893-455b-9d4b-4013c187c288" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 129.790s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.907804] env[62923]: DEBUG nova.scheduler.client.report [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 732.086319] env[62923]: INFO nova.compute.manager [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] [instance: 627ebcab-90f9-4ebe-baf9-52fe808ec8c6] Took 1.04 seconds to deallocate network for instance. [ 732.304587] env[62923]: DEBUG nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 732.414018] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.491s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.414471] env[62923]: DEBUG nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 732.417007] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.187s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.418379] env[62923]: INFO nova.compute.claims [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.827066] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.926041] env[62923]: DEBUG nova.compute.utils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 732.927349] env[62923]: DEBUG nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 732.927521] env[62923]: DEBUG nova.network.neutron [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 732.974530] env[62923]: DEBUG nova.policy [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ad76ea94b62472fa3318cbbdb308ebe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d1559d2844647aba922cae8e9d992e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 733.117318] env[62923]: INFO nova.scheduler.client.report [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Deleted allocations for instance 627ebcab-90f9-4ebe-baf9-52fe808ec8c6 [ 733.379074] env[62923]: DEBUG nova.network.neutron [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Successfully created port: b6985535-654b-4dcd-bc76-0ae48fb21c12 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 733.432972] env[62923]: DEBUG nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Start building block device mappings for instance. 
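The "Policy check ... failed" lines here are routine soft checks, not errors: before wiring a port, Nova asks oslo.policy whether the requester may attach to an external network and simply proceeds without that capability when the answer is no. A sketch of the same kind of check, assuming a hypothetical admin-only default for the rule (the rule name and the creds shape are taken from the log line above):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['member', 'reader'],
             'project_id': '2d1559d2844647aba922cae8e9d992e6'}
    # Evaluates to False for a member/reader token, matching the failed
    # check logged above; execution continues regardless.
    print(enforcer.enforce('network:attach_external_network', {}, creds))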
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 733.627742] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e74e0b3f-0162-495e-a6a9-b4c99ae0f6f2 tempest-ListServersNegativeTestJSON-762248896 tempest-ListServersNegativeTestJSON-762248896-project-member] Lock "627ebcab-90f9-4ebe-baf9-52fe808ec8c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 131.580s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.742962] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2f21be-e5ca-4025-b3e0-74c80ad05b62 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.751024] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388eeac6-bab4-4d88-aa7f-bcccb93885f4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.784046] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea92476-cbe7-401f-854d-303f05dddf4b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.791439] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5273c8cf-1cfb-4849-91dc-aa382207a3d8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.805149] env[62923]: DEBUG nova.compute.provider_tree [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.129367] env[62923]: DEBUG nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 734.308544] env[62923]: DEBUG nova.scheduler.client.report [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 734.347866] env[62923]: DEBUG nova.compute.manager [req-20d7a545-a5e6-4f10-9383-5442a6c147af req-35f1619d-f987-42bf-8aab-1e7b2e4cc0a1 service nova] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Received event network-changed-b6985535-654b-4dcd-bc76-0ae48fb21c12 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 734.348124] env[62923]: DEBUG nova.compute.manager [req-20d7a545-a5e6-4f10-9383-5442a6c147af req-35f1619d-f987-42bf-8aab-1e7b2e4cc0a1 service nova] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Refreshing instance network info cache due to event network-changed-b6985535-654b-4dcd-bc76-0ae48fb21c12. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 734.348391] env[62923]: DEBUG oslo_concurrency.lockutils [req-20d7a545-a5e6-4f10-9383-5442a6c147af req-35f1619d-f987-42bf-8aab-1e7b2e4cc0a1 service nova] Acquiring lock "refresh_cache-a701b2b9-10df-4ba3-8b78-b6b486d8f1db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.348589] env[62923]: DEBUG oslo_concurrency.lockutils [req-20d7a545-a5e6-4f10-9383-5442a6c147af req-35f1619d-f987-42bf-8aab-1e7b2e4cc0a1 service nova] Acquired lock "refresh_cache-a701b2b9-10df-4ba3-8b78-b6b486d8f1db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.348827] env[62923]: DEBUG nova.network.neutron [req-20d7a545-a5e6-4f10-9383-5442a6c147af req-35f1619d-f987-42bf-8aab-1e7b2e4cc0a1 service nova] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Refreshing network info cache for port b6985535-654b-4dcd-bc76-0ae48fb21c12 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 734.442119] env[62923]: DEBUG nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Start spawning the instance on the hypervisor. 
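The repeated "Inventory has not changed" reports pin down the host's schedulable capacity. Placement computes capacity as (total - reserved) * allocation_ratio, with max_unit capping what any single instance may consume; a worked pass over the dict logged above (values copied from the set_inventory_for_provider line):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 148},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: %g schedulable, at most %d per instance'
              % (rc, capacity, inv['max_unit']))
    # VCPU: 192 schedulable (48 * 4.0 overcommit), 16 per instance
    # MEMORY_MB: 196078 schedulable (196590 - 512), 65530 per instance
    # DISK_GB: 400 schedulable, 148 per instance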
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 734.478990] env[62923]: DEBUG nova.virt.hardware [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 734.478990] env[62923]: DEBUG nova.virt.hardware [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 734.478990] env[62923]: DEBUG nova.virt.hardware [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 734.478990] env[62923]: DEBUG nova.virt.hardware [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 734.479321] env[62923]: DEBUG nova.virt.hardware [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 734.479321] env[62923]: DEBUG nova.virt.hardware [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 734.479321] env[62923]: DEBUG nova.virt.hardware [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 734.479321] env[62923]: DEBUG nova.virt.hardware [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 734.479778] env[62923]: DEBUG nova.virt.hardware [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Got 1 possible 
topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 734.479973] env[62923]: DEBUG nova.virt.hardware [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 734.480213] env[62923]: DEBUG nova.virt.hardware [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 734.481064] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc9641d-213a-4272-a3cd-fabd9fb45300 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.489493] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ede768-e548-4b6a-8280-6d675c07bb48 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.554929] env[62923]: ERROR nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b6985535-654b-4dcd-bc76-0ae48fb21c12, please check neutron logs for more information. [ 734.554929] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 734.554929] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 734.554929] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 734.554929] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 734.554929] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 734.554929] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 734.554929] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 734.554929] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.554929] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 734.554929] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.554929] env[62923]: ERROR nova.compute.manager raise self.value [ 734.554929] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 734.554929] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 734.554929] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 734.554929] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 734.555547] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 734.555547] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 734.555547] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b6985535-654b-4dcd-bc76-0ae48fb21c12, please check neutron logs for more information. [ 734.555547] env[62923]: ERROR nova.compute.manager [ 734.555547] env[62923]: Traceback (most recent call last): [ 734.555547] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 734.555547] env[62923]: listener.cb(fileno) [ 734.555547] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 734.555547] env[62923]: result = function(*args, **kwargs) [ 734.555547] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 734.555547] env[62923]: return func(*args, **kwargs) [ 734.555547] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 734.555547] env[62923]: raise e [ 734.555547] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 734.555547] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 734.555547] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 734.555547] env[62923]: created_port_ids = self._update_ports_for_instance( [ 734.555547] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 734.555547] env[62923]: with excutils.save_and_reraise_exception(): [ 734.555547] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.555547] env[62923]: self.force_reraise() [ 734.555547] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.555547] env[62923]: raise self.value [ 734.555547] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 734.555547] env[62923]: updated_port = self._update_port( [ 734.555547] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 734.555547] env[62923]: _ensure_no_port_binding_failure(port) [ 734.555547] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 734.555547] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 734.556494] env[62923]: nova.exception.PortBindingFailed: Binding failed for port b6985535-654b-4dcd-bc76-0ae48fb21c12, please check neutron logs for more information. [ 734.556494] env[62923]: Removing descriptor: 18 [ 734.556494] env[62923]: ERROR nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b6985535-654b-4dcd-bc76-0ae48fb21c12, please check neutron logs for more information. 
[ 734.556494] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Traceback (most recent call last): [ 734.556494] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 734.556494] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] yield resources [ 734.556494] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 734.556494] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] self.driver.spawn(context, instance, image_meta, [ 734.556494] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 734.556494] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] self._vmops.spawn(context, instance, image_meta, injected_files, [ 734.556494] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 734.556494] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] vm_ref = self.build_virtual_machine(instance, [ 734.556908] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 734.556908] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] vif_infos = vmwarevif.get_vif_info(self._session, [ 734.556908] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 734.556908] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] for vif in network_info: [ 734.556908] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 734.556908] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] return self._sync_wrapper(fn, *args, **kwargs) [ 734.556908] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 734.556908] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] self.wait() [ 734.556908] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 734.556908] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] self[:] = self._gt.wait() [ 734.556908] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 734.556908] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] return self._exit_event.wait() [ 734.556908] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 734.557543] env[62923]: ERROR 
nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] result = hub.switch() [ 734.557543] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 734.557543] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] return self.greenlet.switch() [ 734.557543] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 734.557543] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] result = function(*args, **kwargs) [ 734.557543] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 734.557543] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] return func(*args, **kwargs) [ 734.557543] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 734.557543] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] raise e [ 734.557543] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 734.557543] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] nwinfo = self.network_api.allocate_for_instance( [ 734.557543] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 734.557543] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] created_port_ids = self._update_ports_for_instance( [ 734.557978] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 734.557978] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] with excutils.save_and_reraise_exception(): [ 734.557978] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.557978] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] self.force_reraise() [ 734.557978] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.557978] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] raise self.value [ 734.557978] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 734.557978] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] updated_port = self._update_port( [ 734.557978] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 734.557978] 
env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] _ensure_no_port_binding_failure(port) [ 734.557978] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 734.557978] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] raise exception.PortBindingFailed(port_id=port['id']) [ 734.558378] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] nova.exception.PortBindingFailed: Binding failed for port b6985535-654b-4dcd-bc76-0ae48fb21c12, please check neutron logs for more information. [ 734.558378] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] [ 734.558378] env[62923]: INFO nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Terminating instance [ 734.558378] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "refresh_cache-a701b2b9-10df-4ba3-8b78-b6b486d8f1db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.651137] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.819351] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.402s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.819871] env[62923]: DEBUG nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Start building networks asynchronously for instance. 
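Every build failure in this section bottoms out in the same check at nova/network/neutron.py:294: Neutron accepted the port update at the REST level, but handed back a binding that no mechanism driver could complete. A self-contained sketch of that check, reconstructed from the tracebacks (the exception class body is simplified; 'binding_failed' is the vif_type Neutron uses for an unbindable port):

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        # Simplified stand-in for nova.exception.PortBindingFailed.
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # The failure is only visible in the binding:vif_type field that
        # Neutron returns -- the port update call itself succeeded.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

As the message says, the root cause lives on the Neutron side (typically a mechanism driver or L2 agent that cannot bind the port on this host); Nova only sees the 'binding_failed' marker, which is why the instance is terminated and re-scheduled rather than retried in place.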
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 734.822623] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 13.666s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.822793] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.822945] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62923) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 734.823374] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.049s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.826825] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffbefd7-7cbc-4159-9751-bff72a7396cb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.835755] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36493797-5dbc-40e4-8955-32e76387b223 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.850166] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9027800d-ddc6-4842-924c-3fe4de67704d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.858131] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38134eb-eb02-4e9b-8883-ffb172cd4055 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.893715] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181488MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62923) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 734.893715] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.894803] env[62923]: DEBUG nova.network.neutron [req-20d7a545-a5e6-4f10-9383-5442a6c147af req-35f1619d-f987-42bf-8aab-1e7b2e4cc0a1 service nova] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Instance cache 
missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.983433] env[62923]: DEBUG nova.network.neutron [req-20d7a545-a5e6-4f10-9383-5442a6c147af req-35f1619d-f987-42bf-8aab-1e7b2e4cc0a1 service nova] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.327808] env[62923]: DEBUG nova.compute.utils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 735.329200] env[62923]: DEBUG nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 735.329376] env[62923]: DEBUG nova.network.neutron [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 735.406069] env[62923]: DEBUG nova.policy [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '45458b16e5834b11a26bea3468558efa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c41559af62b24a90adea8d6dba02413a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 735.486821] env[62923]: DEBUG oslo_concurrency.lockutils [req-20d7a545-a5e6-4f10-9383-5442a6c147af req-35f1619d-f987-42bf-8aab-1e7b2e4cc0a1 service nova] Releasing lock "refresh_cache-a701b2b9-10df-4ba3-8b78-b6b486d8f1db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.487279] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "refresh_cache-a701b2b9-10df-4ba3-8b78-b6b486d8f1db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.487427] env[62923]: DEBUG nova.network.neutron [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 735.711912] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4988e916-758a-4d4f-8901-61154f4af86a {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.724527] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa11c228-e782-4e75-84e7-d6a99c42adb1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.754623] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474e1161-3d25-4e81-95f2-c6cfda179174 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.763025] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6306af98-bb8f-4b8d-9e29-51b3c007bb69 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.776103] env[62923]: DEBUG nova.compute.provider_tree [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.777954] env[62923]: DEBUG nova.network.neutron [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Successfully created port: 5866fb2d-9382-4b20-9cf1-def904197df6 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 735.833395] env[62923]: DEBUG nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 736.008767] env[62923]: DEBUG nova.network.neutron [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.139063] env[62923]: DEBUG nova.network.neutron [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.281466] env[62923]: DEBUG nova.scheduler.client.report [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 736.380909] env[62923]: DEBUG nova.compute.manager [req-e95e1a53-316f-4690-97af-a284af09390f req-b14a6a35-771f-4f47-82cb-fd18f1179cb3 service nova] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Received event network-vif-deleted-b6985535-654b-4dcd-bc76-0ae48fb21c12 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 736.642231] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "refresh_cache-a701b2b9-10df-4ba3-8b78-b6b486d8f1db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.642708] env[62923]: DEBUG nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 736.642834] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 736.643207] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ffc0fe4-41d5-4ae9-94a8-90b2c311fffa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.652209] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44dab6e9-124d-465a-8eaa-cde7bc2f1f93 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.674283] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a701b2b9-10df-4ba3-8b78-b6b486d8f1db could not be found. 
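Annotation: every failed build in this log bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294 in the tracebacks), which converts a failed neutron binding into a hard PortBindingFailed error. The following is a minimal, self-contained sketch of what such a check plausibly looks like, based only on the call sites and messages visible in these tracebacks; the VIF-type constant and the exception class are local stand-ins, not Nova's own definitions.

    # Stand-in for nova.exception.PortBindingFailed; the message text
    # mirrors the log lines above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    # Assumed: neutron marks a port whose binding failed with this
    # binding:vif_type value.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def _ensure_no_port_binding_failure(port):
        # Turn a failed neutron binding into a hard error, as seen in
        # every traceback in this section.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example: the port from the traceback above.
    try:
        _ensure_no_port_binding_failure(
            {'id': 'b6985535-654b-4dcd-bc76-0ae48fb21c12',
             'binding:vif_type': VIF_TYPE_BINDING_FAILED})
    except PortBindingFailed as exc:
        print(exc)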
[ 736.674556] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 736.674744] env[62923]: INFO nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Took 0.03 seconds to destroy the instance on the hypervisor. [ 736.675026] env[62923]: DEBUG oslo.service.loopingcall [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 736.675278] env[62923]: DEBUG nova.compute.manager [-] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 736.675376] env[62923]: DEBUG nova.network.neutron [-] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 736.699100] env[62923]: DEBUG nova.network.neutron [-] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.786610] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.963s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.787259] env[62923]: ERROR nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0b11ddb2-dda0-428f-abd5-5dfdf1041aa6, please check neutron logs for more information. 
[ 736.787259] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Traceback (most recent call last): [ 736.787259] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 736.787259] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] self.driver.spawn(context, instance, image_meta, [ 736.787259] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 736.787259] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] self._vmops.spawn(context, instance, image_meta, injected_files, [ 736.787259] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 736.787259] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] vm_ref = self.build_virtual_machine(instance, [ 736.787259] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 736.787259] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] vif_infos = vmwarevif.get_vif_info(self._session, [ 736.787259] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 736.787695] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] for vif in network_info: [ 736.787695] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 736.787695] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] return self._sync_wrapper(fn, *args, **kwargs) [ 736.787695] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 736.787695] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] self.wait() [ 736.787695] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 736.787695] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] self[:] = self._gt.wait() [ 736.787695] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 736.787695] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] return self._exit_event.wait() [ 736.787695] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 736.787695] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] result = hub.switch() [ 736.787695] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
736.787695] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] return self.greenlet.switch() [ 736.788310] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 736.788310] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] result = function(*args, **kwargs) [ 736.788310] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 736.788310] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] return func(*args, **kwargs) [ 736.788310] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 736.788310] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] raise e [ 736.788310] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 736.788310] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] nwinfo = self.network_api.allocate_for_instance( [ 736.788310] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 736.788310] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] created_port_ids = self._update_ports_for_instance( [ 736.788310] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 736.788310] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] with excutils.save_and_reraise_exception(): [ 736.788310] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.788778] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] self.force_reraise() [ 736.788778] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.788778] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] raise self.value [ 736.788778] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 736.788778] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] updated_port = self._update_port( [ 736.788778] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.788778] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] _ensure_no_port_binding_failure(port) [ 736.788778] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 736.788778] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] raise exception.PortBindingFailed(port_id=port['id']) [ 736.788778] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] nova.exception.PortBindingFailed: Binding failed for port 0b11ddb2-dda0-428f-abd5-5dfdf1041aa6, please check neutron logs for more information. [ 736.788778] env[62923]: ERROR nova.compute.manager [instance: c370e9a9-3c09-418c-b2fc-e75323298518] [ 736.789235] env[62923]: DEBUG nova.compute.utils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Binding failed for port 0b11ddb2-dda0-428f-abd5-5dfdf1041aa6, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 736.789402] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.871s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.792739] env[62923]: DEBUG nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Build of instance c370e9a9-3c09-418c-b2fc-e75323298518 was re-scheduled: Binding failed for port 0b11ddb2-dda0-428f-abd5-5dfdf1041aa6, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 736.793270] env[62923]: DEBUG nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 736.793533] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquiring lock "refresh_cache-c370e9a9-3c09-418c-b2fc-e75323298518" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.793713] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquired lock "refresh_cache-c370e9a9-3c09-418c-b2fc-e75323298518" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.793899] env[62923]: DEBUG nova.network.neutron [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 736.842024] env[62923]: DEBUG nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 736.866718] env[62923]: DEBUG nova.virt.hardware [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 736.866949] env[62923]: DEBUG nova.virt.hardware [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 736.867136] env[62923]: DEBUG nova.virt.hardware [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 736.867367] env[62923]: DEBUG nova.virt.hardware [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 736.867539] env[62923]: DEBUG nova.virt.hardware [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 736.867685] env[62923]: DEBUG nova.virt.hardware [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 736.867880] env[62923]: DEBUG nova.virt.hardware [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 736.868049] env[62923]: DEBUG nova.virt.hardware [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 736.868216] env[62923]: DEBUG 
nova.virt.hardware [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 736.868372] env[62923]: DEBUG nova.virt.hardware [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 736.868833] env[62923]: DEBUG nova.virt.hardware [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 736.869376] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7c4193-4e2b-425e-a6c4-bd8f0c205815 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.878891] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bee7230-17e8-4724-90c2-dd6eff5ec1b8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.071765] env[62923]: ERROR nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5866fb2d-9382-4b20-9cf1-def904197df6, please check neutron logs for more information. 
[ 737.071765] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 737.071765] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 737.071765] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 737.071765] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 737.071765] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 737.071765] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 737.071765] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 737.071765] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.071765] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 737.071765] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.071765] env[62923]: ERROR nova.compute.manager raise self.value [ 737.071765] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 737.071765] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 737.071765] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.071765] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 737.072317] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 737.072317] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 737.072317] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5866fb2d-9382-4b20-9cf1-def904197df6, please check neutron logs for more information. 
[ 737.072317] env[62923]: ERROR nova.compute.manager [ 737.072317] env[62923]: Traceback (most recent call last): [ 737.072317] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 737.072317] env[62923]: listener.cb(fileno) [ 737.072317] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 737.072317] env[62923]: result = function(*args, **kwargs) [ 737.072317] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 737.072317] env[62923]: return func(*args, **kwargs) [ 737.072317] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 737.072317] env[62923]: raise e [ 737.072317] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 737.072317] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 737.072317] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 737.072317] env[62923]: created_port_ids = self._update_ports_for_instance( [ 737.072317] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 737.072317] env[62923]: with excutils.save_and_reraise_exception(): [ 737.072317] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.072317] env[62923]: self.force_reraise() [ 737.072317] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.072317] env[62923]: raise self.value [ 737.072317] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 737.072317] env[62923]: updated_port = self._update_port( [ 737.072317] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.072317] env[62923]: _ensure_no_port_binding_failure(port) [ 737.072317] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 737.072317] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 737.073549] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 5866fb2d-9382-4b20-9cf1-def904197df6, please check neutron logs for more information. [ 737.073549] env[62923]: Removing descriptor: 18 [ 737.073549] env[62923]: ERROR nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5866fb2d-9382-4b20-9cf1-def904197df6, please check neutron logs for more information. 
[ 737.073549] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Traceback (most recent call last): [ 737.073549] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 737.073549] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] yield resources [ 737.073549] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 737.073549] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] self.driver.spawn(context, instance, image_meta, [ 737.073549] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 737.073549] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 737.073549] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 737.073549] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] vm_ref = self.build_virtual_machine(instance, [ 737.074124] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 737.074124] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] vif_infos = vmwarevif.get_vif_info(self._session, [ 737.074124] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 737.074124] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] for vif in network_info: [ 737.074124] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 737.074124] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] return self._sync_wrapper(fn, *args, **kwargs) [ 737.074124] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 737.074124] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] self.wait() [ 737.074124] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 737.074124] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] self[:] = self._gt.wait() [ 737.074124] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 737.074124] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] return self._exit_event.wait() [ 737.074124] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 737.074539] env[62923]: ERROR 
nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] result = hub.switch() [ 737.074539] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 737.074539] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] return self.greenlet.switch() [ 737.074539] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 737.074539] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] result = function(*args, **kwargs) [ 737.074539] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 737.074539] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] return func(*args, **kwargs) [ 737.074539] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 737.074539] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] raise e [ 737.074539] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 737.074539] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] nwinfo = self.network_api.allocate_for_instance( [ 737.074539] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 737.074539] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] created_port_ids = self._update_ports_for_instance( [ 737.074962] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 737.074962] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] with excutils.save_and_reraise_exception(): [ 737.074962] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.074962] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] self.force_reraise() [ 737.074962] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.074962] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] raise self.value [ 737.074962] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 737.074962] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] updated_port = self._update_port( [ 737.074962] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.074962] 
env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] _ensure_no_port_binding_failure(port) [ 737.074962] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 737.074962] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] raise exception.PortBindingFailed(port_id=port['id']) [ 737.075519] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] nova.exception.PortBindingFailed: Binding failed for port 5866fb2d-9382-4b20-9cf1-def904197df6, please check neutron logs for more information. [ 737.075519] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] [ 737.075519] env[62923]: INFO nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Terminating instance [ 737.076963] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Acquiring lock "refresh_cache-fcee63c5-eaa3-4d8c-a612-9c30087433e1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.077078] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Acquired lock "refresh_cache-fcee63c5-eaa3-4d8c-a612-9c30087433e1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.077663] env[62923]: DEBUG nova.network.neutron [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 737.203850] env[62923]: DEBUG nova.network.neutron [-] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.318017] env[62923]: DEBUG nova.network.neutron [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.396816] env[62923]: DEBUG nova.network.neutron [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.548854] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71826295-311b-405d-98f8-27f79d05ffc1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.556263] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a49b44c-50b2-4227-9cf4-1d986d753580 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.587307] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703f208e-f41f-411b-a9a9-a93be2cef5e6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.595069] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092a04be-1f43-483d-b881-21673d13724d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.608479] env[62923]: DEBUG nova.compute.provider_tree [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.610399] env[62923]: DEBUG nova.network.neutron [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.706805] env[62923]: INFO nova.compute.manager [-] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Took 1.03 seconds to deallocate network for instance. 
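Annotation: the "Acquiring lock \"compute_resources\"" / "waited 13.871s" / "held 1.963s" lines throughout this section come from oslo.concurrency's lockutils. Every resource-tracker operation visible here (instance_claim, abort_instance_claim, clean_compute_node_cache, update_available_resource) serializes on the same in-process lock, which is why a burst of failed builds produces double-digit waited times. A rough sketch of the pattern, assuming oslo.concurrency is installed; the function body is illustrative, not Nova's actual tracker code.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        # All mutations of the tracker's node view run under this one
        # shared lock; lockutils emits the acquire/release DEBUG lines
        # (with waited/held durations) seen in the log above.
        print('aborting claim for %s' % instance_uuid)

    abort_instance_claim('a701b2b9-10df-4ba3-8b78-b6b486d8f1db')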
[ 737.709196] env[62923]: DEBUG nova.compute.claims [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 737.709443] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.723772] env[62923]: DEBUG nova.network.neutron [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.899859] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Releasing lock "refresh_cache-c370e9a9-3c09-418c-b2fc-e75323298518" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.900117] env[62923]: DEBUG nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 737.900289] env[62923]: DEBUG nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 737.900455] env[62923]: DEBUG nova.network.neutron [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 737.913704] env[62923]: DEBUG nova.network.neutron [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.113569] env[62923]: DEBUG nova.scheduler.client.report [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 738.226125] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Releasing lock "refresh_cache-fcee63c5-eaa3-4d8c-a612-9c30087433e1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.226747] env[62923]: DEBUG nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 738.226747] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 738.227040] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aac56943-0052-4f0c-a452-6e7964a57beb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.236205] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8262a678-a522-45b7-9c9e-6dc623cff587 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.257885] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fcee63c5-eaa3-4d8c-a612-9c30087433e1 could not be found. [ 738.258115] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 738.258296] env[62923]: INFO nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Took 0.03 seconds to destroy the instance on the hypervisor. 
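Annotation: as with instance a701b2b9 earlier, the destroy path just above tolerates a VM that was never created on the backend: vmops catches InstanceNotFound, logs the "Instance does not exist on backend" warning, and proceeds as if the destroy succeeded so that network deallocation and claim cleanup still run. A hedged sketch of that control flow; the helper names here are hypothetical, not Nova's.

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def find_vm(uuid):
        # Hypothetical backend lookup; the tempest instances in this
        # log fail before a VM ever exists, so the lookup raises.
        raise InstanceNotFound('Instance %s could not be found.' % uuid)

    def destroy(uuid):
        try:
            vm_ref = find_vm(uuid)
            # ... power off and unregister vm_ref ...
        except InstanceNotFound as exc:
            print('WARNING: Instance does not exist on backend: %s' % exc)
        # Cleanup continues either way, matching "Instance destroyed"
        # followed by network deallocation in the log.
        print('Instance destroyed')

    destroy('fcee63c5-eaa3-4d8c-a612-9c30087433e1')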
[ 738.258528] env[62923]: DEBUG oslo.service.loopingcall [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 738.258740] env[62923]: DEBUG nova.compute.manager [-] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 738.258830] env[62923]: DEBUG nova.network.neutron [-] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 738.272635] env[62923]: DEBUG nova.network.neutron [-] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.406428] env[62923]: DEBUG nova.compute.manager [req-448b1b99-4ea4-4d9d-9a16-5c6daeffac27 req-5b1dac2d-d281-4224-8a1a-5b437fc7d5f8 service nova] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Received event network-changed-5866fb2d-9382-4b20-9cf1-def904197df6 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 738.406725] env[62923]: DEBUG nova.compute.manager [req-448b1b99-4ea4-4d9d-9a16-5c6daeffac27 req-5b1dac2d-d281-4224-8a1a-5b437fc7d5f8 service nova] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Refreshing instance network info cache due to event network-changed-5866fb2d-9382-4b20-9cf1-def904197df6. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 738.406846] env[62923]: DEBUG oslo_concurrency.lockutils [req-448b1b99-4ea4-4d9d-9a16-5c6daeffac27 req-5b1dac2d-d281-4224-8a1a-5b437fc7d5f8 service nova] Acquiring lock "refresh_cache-fcee63c5-eaa3-4d8c-a612-9c30087433e1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.406983] env[62923]: DEBUG oslo_concurrency.lockutils [req-448b1b99-4ea4-4d9d-9a16-5c6daeffac27 req-5b1dac2d-d281-4224-8a1a-5b437fc7d5f8 service nova] Acquired lock "refresh_cache-fcee63c5-eaa3-4d8c-a612-9c30087433e1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.407182] env[62923]: DEBUG nova.network.neutron [req-448b1b99-4ea4-4d9d-9a16-5c6daeffac27 req-5b1dac2d-d281-4224-8a1a-5b437fc7d5f8 service nova] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Refreshing network info cache for port 5866fb2d-9382-4b20-9cf1-def904197df6 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 738.416347] env[62923]: DEBUG nova.network.neutron [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.618912] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.830s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.619579] env[62923]: ERROR nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1fac04a8-79b0-4aed-ac19-3a7c50287f9d, please check neutron logs for more information. [ 738.619579] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Traceback (most recent call last): [ 738.619579] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 738.619579] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] self.driver.spawn(context, instance, image_meta, [ 738.619579] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 738.619579] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 738.619579] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 738.619579] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] vm_ref = self.build_virtual_machine(instance, [ 738.619579] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 738.619579] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] vif_infos = vmwarevif.get_vif_info(self._session, [ 738.619579] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 738.619970] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] for vif in network_info: [ 738.619970] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 738.619970] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] return self._sync_wrapper(fn, *args, **kwargs) [ 738.619970] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 738.619970] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] self.wait() [ 738.619970] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 738.619970] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] self[:] = self._gt.wait() [ 738.619970] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 738.619970] env[62923]: ERROR 
nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] return self._exit_event.wait() [ 738.619970] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 738.619970] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] result = hub.switch() [ 738.619970] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 738.619970] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] return self.greenlet.switch() [ 738.620394] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 738.620394] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] result = function(*args, **kwargs) [ 738.620394] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 738.620394] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] return func(*args, **kwargs) [ 738.620394] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 738.620394] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] raise e [ 738.620394] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 738.620394] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] nwinfo = self.network_api.allocate_for_instance( [ 738.620394] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 738.620394] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] created_port_ids = self._update_ports_for_instance( [ 738.620394] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 738.620394] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] with excutils.save_and_reraise_exception(): [ 738.620394] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.620806] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] self.force_reraise() [ 738.620806] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.620806] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] raise self.value [ 738.620806] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance 
[ 738.620806] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] updated_port = self._update_port( [ 738.620806] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.620806] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] _ensure_no_port_binding_failure(port) [ 738.620806] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.620806] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] raise exception.PortBindingFailed(port_id=port['id']) [ 738.620806] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] nova.exception.PortBindingFailed: Binding failed for port 1fac04a8-79b0-4aed-ac19-3a7c50287f9d, please check neutron logs for more information. [ 738.620806] env[62923]: ERROR nova.compute.manager [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] [ 738.621162] env[62923]: DEBUG nova.compute.utils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Binding failed for port 1fac04a8-79b0-4aed-ac19-3a7c50287f9d, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 738.621570] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.661s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.623114] env[62923]: INFO nova.compute.claims [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 738.625719] env[62923]: DEBUG nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Build of instance f853c572-ad40-4cce-83d4-d5f11b42c37f was re-scheduled: Binding failed for port 1fac04a8-79b0-4aed-ac19-3a7c50287f9d, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 738.626145] env[62923]: DEBUG nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 738.626391] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquiring lock "refresh_cache-f853c572-ad40-4cce-83d4-d5f11b42c37f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.626536] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Acquired lock "refresh_cache-f853c572-ad40-4cce-83d4-d5f11b42c37f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.626691] env[62923]: DEBUG nova.network.neutron [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 738.775746] env[62923]: DEBUG nova.network.neutron [-] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.918585] env[62923]: INFO nova.compute.manager [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: c370e9a9-3c09-418c-b2fc-e75323298518] Took 1.02 seconds to deallocate network for instance. [ 738.924020] env[62923]: DEBUG nova.network.neutron [req-448b1b99-4ea4-4d9d-9a16-5c6daeffac27 req-5b1dac2d-d281-4224-8a1a-5b437fc7d5f8 service nova] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.034289] env[62923]: DEBUG nova.network.neutron [req-448b1b99-4ea4-4d9d-9a16-5c6daeffac27 req-5b1dac2d-d281-4224-8a1a-5b437fc7d5f8 service nova] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.145197] env[62923]: DEBUG nova.network.neutron [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.204995] env[62923]: DEBUG nova.network.neutron [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.278860] env[62923]: INFO nova.compute.manager [-] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Took 1.02 seconds to deallocate network for instance. [ 739.281545] env[62923]: DEBUG nova.compute.claims [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 739.281721] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.536987] env[62923]: DEBUG oslo_concurrency.lockutils [req-448b1b99-4ea4-4d9d-9a16-5c6daeffac27 req-5b1dac2d-d281-4224-8a1a-5b437fc7d5f8 service nova] Releasing lock "refresh_cache-fcee63c5-eaa3-4d8c-a612-9c30087433e1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.537297] env[62923]: DEBUG nova.compute.manager [req-448b1b99-4ea4-4d9d-9a16-5c6daeffac27 req-5b1dac2d-d281-4224-8a1a-5b437fc7d5f8 service nova] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Received event network-vif-deleted-5866fb2d-9382-4b20-9cf1-def904197df6 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 739.707415] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Releasing lock "refresh_cache-f853c572-ad40-4cce-83d4-d5f11b42c37f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.707629] env[62923]: DEBUG nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 739.707802] env[62923]: DEBUG nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 739.707948] env[62923]: DEBUG nova.network.neutron [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 739.723443] env[62923]: DEBUG nova.network.neutron [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.880439] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68ca109-7cad-49db-b3ec-2c49f467d068 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.888143] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904d3dd4-5063-411b-b5b8-4172b9bf9f30 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.918102] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4389e844-c9fc-4d13-a409-0a396a2e2475 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.925274] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d387af-a551-49cc-8761-6e16dd2abfee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.942782] env[62923]: DEBUG nova.compute.provider_tree [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.949381] env[62923]: INFO nova.scheduler.client.report [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Deleted allocations for instance c370e9a9-3c09-418c-b2fc-e75323298518 [ 740.226602] env[62923]: DEBUG nova.network.neutron [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.446049] env[62923]: DEBUG nova.scheduler.client.report [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed for provider 
a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 740.456458] env[62923]: DEBUG oslo_concurrency.lockutils [None req-865fc905-6cba-4eda-8d38-cd2e90f83cc4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Lock "c370e9a9-3c09-418c-b2fc-e75323298518" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 134.252s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.733188] env[62923]: INFO nova.compute.manager [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] [instance: f853c572-ad40-4cce-83d4-d5f11b42c37f] Took 1.02 seconds to deallocate network for instance. [ 740.951312] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.951786] env[62923]: DEBUG nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 740.954903] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.316s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.959024] env[62923]: DEBUG nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 741.459418] env[62923]: DEBUG nova.compute.utils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 741.463811] env[62923]: DEBUG nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Allocating IP information in the background.
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 741.463980] env[62923]: DEBUG nova.network.neutron [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 741.481918] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.515136] env[62923]: DEBUG nova.policy [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e1b761abfd44661a6da62ba35ec442f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2948b6c7e6f04cf98b36777c2fc94fc1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 741.712819] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5bf542-3fe5-486c-99d0-f0b0575ff0af {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.720403] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42d0e30-0422-46e8-a061-a33b042fb95e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.751642] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7ce130-8bc9-42c7-b31c-c06a28ea94d4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.761216] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4f443d-ae08-446d-b54e-fafb3253cb22 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.776526] env[62923]: DEBUG nova.compute.provider_tree [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.780062] env[62923]: INFO nova.scheduler.client.report [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Deleted allocations for instance f853c572-ad40-4cce-83d4-d5f11b42c37f [ 741.804426] env[62923]: DEBUG nova.network.neutron [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] 
Successfully created port: e410cdcf-56e9-4a0b-b47c-c855b5d11d1e {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 741.964535] env[62923]: DEBUG nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 742.284762] env[62923]: DEBUG nova.scheduler.client.report [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 742.288396] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fcc94ade-90bd-421c-8108-69e3141fc0f4 tempest-ListImageFiltersTestJSON-353085782 tempest-ListImageFiltersTestJSON-353085782-project-member] Lock "f853c572-ad40-4cce-83d4-d5f11b42c37f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 135.412s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.717707] env[62923]: DEBUG nova.compute.manager [req-a5ba8ce2-2a10-441f-830a-e79e30a941ae req-05d0f08d-54eb-4137-a03f-e475e0b7f2fc service nova] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Received event network-changed-e410cdcf-56e9-4a0b-b47c-c855b5d11d1e {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 742.717922] env[62923]: DEBUG nova.compute.manager [req-a5ba8ce2-2a10-441f-830a-e79e30a941ae req-05d0f08d-54eb-4137-a03f-e475e0b7f2fc service nova] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Refreshing instance network info cache due to event network-changed-e410cdcf-56e9-4a0b-b47c-c855b5d11d1e.
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 742.718153] env[62923]: DEBUG oslo_concurrency.lockutils [req-a5ba8ce2-2a10-441f-830a-e79e30a941ae req-05d0f08d-54eb-4137-a03f-e475e0b7f2fc service nova] Acquiring lock "refresh_cache-9b334c72-11f5-4165-a350-09fe5487a9a0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.718296] env[62923]: DEBUG oslo_concurrency.lockutils [req-a5ba8ce2-2a10-441f-830a-e79e30a941ae req-05d0f08d-54eb-4137-a03f-e475e0b7f2fc service nova] Acquired lock "refresh_cache-9b334c72-11f5-4165-a350-09fe5487a9a0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.718450] env[62923]: DEBUG nova.network.neutron [req-a5ba8ce2-2a10-441f-830a-e79e30a941ae req-05d0f08d-54eb-4137-a03f-e475e0b7f2fc service nova] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Refreshing network info cache for port e410cdcf-56e9-4a0b-b47c-c855b5d11d1e {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 742.791206] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.836s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.791838] env[62923]: ERROR nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b47d0b6e-9eac-4506-b983-6bddfe19f795, please check neutron logs for more information. 
[ 742.791838] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] Traceback (most recent call last): [ 742.791838] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 742.791838] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] self.driver.spawn(context, instance, image_meta, [ 742.791838] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 742.791838] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 742.791838] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 742.791838] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] vm_ref = self.build_virtual_machine(instance, [ 742.791838] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 742.791838] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] vif_infos = vmwarevif.get_vif_info(self._session, [ 742.791838] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 742.792244] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] for vif in network_info: [ 742.792244] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 742.792244] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] return self._sync_wrapper(fn, *args, **kwargs) [ 742.792244] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 742.792244] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] self.wait() [ 742.792244] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 742.792244] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] self[:] = self._gt.wait() [ 742.792244] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 742.792244] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] return self._exit_event.wait() [ 742.792244] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 742.792244] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] current.throw(*self._exc) [ 742.792244] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
742.792244] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] result = function(*args, **kwargs) [ 742.792598] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 742.792598] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] return func(*args, **kwargs) [ 742.792598] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 742.792598] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] raise e [ 742.792598] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 742.792598] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] nwinfo = self.network_api.allocate_for_instance( [ 742.792598] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 742.792598] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] created_port_ids = self._update_ports_for_instance( [ 742.792598] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 742.792598] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] with excutils.save_and_reraise_exception(): [ 742.792598] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.792598] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] self.force_reraise() [ 742.792598] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.792951] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] raise self.value [ 742.792951] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 742.792951] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] updated_port = self._update_port( [ 742.792951] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.792951] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] _ensure_no_port_binding_failure(port) [ 742.792951] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.792951] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] raise exception.PortBindingFailed(port_id=port['id']) [ 742.792951] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] nova.exception.PortBindingFailed: Binding failed for 
port b47d0b6e-9eac-4506-b983-6bddfe19f795, please check neutron logs for more information. [ 742.792951] env[62923]: ERROR nova.compute.manager [instance: b48be393-189f-4093-b079-fe555192e7ed] [ 742.792951] env[62923]: DEBUG nova.compute.utils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Binding failed for port b47d0b6e-9eac-4506-b983-6bddfe19f795, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 742.796585] env[62923]: DEBUG nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Build of instance b48be393-189f-4093-b079-fe555192e7ed was re-scheduled: Binding failed for port b47d0b6e-9eac-4506-b983-6bddfe19f795, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 742.797242] env[62923]: DEBUG nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 742.797592] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Acquiring lock "refresh_cache-b48be393-189f-4093-b079-fe555192e7ed" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.797830] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Acquired lock "refresh_cache-b48be393-189f-4093-b079-fe555192e7ed" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.798112] env[62923]: DEBUG nova.network.neutron [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 742.799615] env[62923]: DEBUG nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 742.802240] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.891s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.803641] env[62923]: INFO nova.compute.claims [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.894982] env[62923]: ERROR nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e410cdcf-56e9-4a0b-b47c-c855b5d11d1e, please check neutron logs for more information. [ 742.894982] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 742.894982] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 742.894982] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 742.894982] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 742.894982] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 742.894982] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 742.894982] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 742.894982] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.894982] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 742.894982] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.894982] env[62923]: ERROR nova.compute.manager raise self.value [ 742.894982] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 742.894982] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 742.894982] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.894982] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 742.896768] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.896768] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 742.896768] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e410cdcf-56e9-4a0b-b47c-c855b5d11d1e, please check neutron logs for more information. 
[ 742.896768] env[62923]: ERROR nova.compute.manager [ 742.896768] env[62923]: Traceback (most recent call last): [ 742.896768] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 742.896768] env[62923]: listener.cb(fileno) [ 742.896768] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 742.896768] env[62923]: result = function(*args, **kwargs) [ 742.896768] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 742.896768] env[62923]: return func(*args, **kwargs) [ 742.896768] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 742.896768] env[62923]: raise e [ 742.896768] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 742.896768] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 742.896768] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 742.896768] env[62923]: created_port_ids = self._update_ports_for_instance( [ 742.896768] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 742.896768] env[62923]: with excutils.save_and_reraise_exception(): [ 742.896768] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.896768] env[62923]: self.force_reraise() [ 742.896768] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.896768] env[62923]: raise self.value [ 742.896768] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 742.896768] env[62923]: updated_port = self._update_port( [ 742.896768] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.896768] env[62923]: _ensure_no_port_binding_failure(port) [ 742.896768] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.896768] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 742.898188] env[62923]: nova.exception.PortBindingFailed: Binding failed for port e410cdcf-56e9-4a0b-b47c-c855b5d11d1e, please check neutron logs for more information. [ 742.898188] env[62923]: Removing descriptor: 18 [ 742.977618] env[62923]: DEBUG nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 742.998373] env[62923]: DEBUG nova.virt.hardware [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 742.998518] env[62923]: DEBUG nova.virt.hardware [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 742.998675] env[62923]: DEBUG nova.virt.hardware [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.999029] env[62923]: DEBUG nova.virt.hardware [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 742.999029] env[62923]: DEBUG nova.virt.hardware [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.999152] env[62923]: DEBUG nova.virt.hardware [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 742.999326] env[62923]: DEBUG nova.virt.hardware [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 742.999526] env[62923]: DEBUG nova.virt.hardware [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 742.999712] env[62923]: DEBUG nova.virt.hardware [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Got 1 possible
topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 742.999871] env[62923]: DEBUG nova.virt.hardware [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 743.000048] env[62923]: DEBUG nova.virt.hardware [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 743.000901] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7e39e7-82d0-4e47-9438-3b422799d9dd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.009281] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35082c66-51e4-4201-ba37-92628716d6ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.022663] env[62923]: ERROR nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e410cdcf-56e9-4a0b-b47c-c855b5d11d1e, please check neutron logs for more information. [ 743.022663] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Traceback (most recent call last): [ 743.022663] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 743.022663] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] yield resources [ 743.022663] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 743.022663] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] self.driver.spawn(context, instance, image_meta, [ 743.022663] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 743.022663] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 743.022663] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 743.022663] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] vm_ref = self.build_virtual_machine(instance, [ 743.022663] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 743.023037] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] vif_infos = vmwarevif.get_vif_info(self._session, [ 743.023037] env[62923]: ERROR nova.compute.manager 
[instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 743.023037] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] for vif in network_info: [ 743.023037] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 743.023037] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] return self._sync_wrapper(fn, *args, **kwargs) [ 743.023037] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 743.023037] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] self.wait() [ 743.023037] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 743.023037] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] self[:] = self._gt.wait() [ 743.023037] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 743.023037] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] return self._exit_event.wait() [ 743.023037] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 743.023037] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] current.throw(*self._exc) [ 743.023465] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 743.023465] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] result = function(*args, **kwargs) [ 743.023465] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 743.023465] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] return func(*args, **kwargs) [ 743.023465] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 743.023465] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] raise e [ 743.023465] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 743.023465] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] nwinfo = self.network_api.allocate_for_instance( [ 743.023465] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 743.023465] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] created_port_ids = self._update_ports_for_instance( [ 743.023465] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File 
"/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 743.023465] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] with excutils.save_and_reraise_exception(): [ 743.023465] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 743.023899] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] self.force_reraise() [ 743.023899] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 743.023899] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] raise self.value [ 743.023899] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 743.023899] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] updated_port = self._update_port( [ 743.023899] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 743.023899] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] _ensure_no_port_binding_failure(port) [ 743.023899] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 743.023899] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] raise exception.PortBindingFailed(port_id=port['id']) [ 743.023899] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] nova.exception.PortBindingFailed: Binding failed for port e410cdcf-56e9-4a0b-b47c-c855b5d11d1e, please check neutron logs for more information. [ 743.023899] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] [ 743.023899] env[62923]: INFO nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Terminating instance [ 743.025691] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "refresh_cache-9b334c72-11f5-4165-a350-09fe5487a9a0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.250337] env[62923]: DEBUG nova.network.neutron [req-a5ba8ce2-2a10-441f-830a-e79e30a941ae req-05d0f08d-54eb-4137-a03f-e475e0b7f2fc service nova] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.325213] env[62923]: DEBUG nova.network.neutron [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.331251] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.410262] env[62923]: DEBUG nova.network.neutron [req-a5ba8ce2-2a10-441f-830a-e79e30a941ae req-05d0f08d-54eb-4137-a03f-e475e0b7f2fc service nova] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.473921] env[62923]: DEBUG nova.network.neutron [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.913180] env[62923]: DEBUG oslo_concurrency.lockutils [req-a5ba8ce2-2a10-441f-830a-e79e30a941ae req-05d0f08d-54eb-4137-a03f-e475e0b7f2fc service nova] Releasing lock "refresh_cache-9b334c72-11f5-4165-a350-09fe5487a9a0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.913579] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "refresh_cache-9b334c72-11f5-4165-a350-09fe5487a9a0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.913759] env[62923]: DEBUG nova.network.neutron [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 743.979545] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Releasing lock "refresh_cache-b48be393-189f-4093-b079-fe555192e7ed" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.979776] env[62923]: DEBUG nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 743.979954] env[62923]: DEBUG nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 743.980130] env[62923]: DEBUG nova.network.neutron [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 744.002014] env[62923]: DEBUG nova.network.neutron [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.067212] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b21c42-7c79-4a48-91e7-240ed4a3b64b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.077818] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62f00e7-5150-43e6-95e1-7d85d7bb486c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.109143] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9c5cd6-20ea-4e6b-b7cb-fe96ecf9910b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.116510] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e24c972-3eb8-406e-89b6-368b5bd5e898 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.129481] env[62923]: DEBUG nova.compute.provider_tree [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.447184] env[62923]: DEBUG nova.network.neutron [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.505765] env[62923]: DEBUG nova.network.neutron [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.573870] env[62923]: DEBUG nova.network.neutron [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.632838] env[62923]: DEBUG nova.scheduler.client.report [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 744.753815] env[62923]: DEBUG nova.compute.manager [req-50674c87-ed22-4086-bad1-d3a8f41dec1a req-61c6a22b-9dab-49a5-ad66-0e275d05e38d service nova] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Received event network-vif-deleted-e410cdcf-56e9-4a0b-b47c-c855b5d11d1e {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 745.010020] env[62923]: INFO nova.compute.manager [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] [instance: b48be393-189f-4093-b079-fe555192e7ed] Took 1.03 seconds to deallocate network for instance. [ 745.077232] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "refresh_cache-9b334c72-11f5-4165-a350-09fe5487a9a0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.077660] env[62923]: DEBUG nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 745.077858] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 745.078167] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13a87efb-8900-4e13-8173-04d132702aa5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.092229] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893aa2df-3d71-47f2-99b6-931015ab1b24 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.117515] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9b334c72-11f5-4165-a350-09fe5487a9a0 could not be found. [ 745.117781] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 745.117997] env[62923]: INFO nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 745.118535] env[62923]: DEBUG oslo.service.loopingcall [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 745.118535] env[62923]: DEBUG nova.compute.manager [-] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 745.118930] env[62923]: DEBUG nova.network.neutron [-] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 745.140489] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.338s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.141066] env[62923]: DEBUG nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Start building networks asynchronously for instance.
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 745.144473] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.281s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.146360] env[62923]: INFO nova.compute.claims [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 745.319452] env[62923]: DEBUG nova.network.neutron [-] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.656029] env[62923]: DEBUG nova.compute.utils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 745.656029] env[62923]: DEBUG nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 745.656029] env[62923]: DEBUG nova.network.neutron [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 745.742281] env[62923]: DEBUG nova.policy [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '261984afa2c04a6198c0af2e732484ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cbc1831f9e2d4696b0529a07cda0f06a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 745.823293] env[62923]: DEBUG nova.network.neutron [-] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.047529] env[62923]: INFO nova.scheduler.client.report [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Deleted allocations for instance b48be393-189f-4093-b079-fe555192e7ed [ 746.081912] env[62923]: DEBUG nova.network.neutron [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] 
[instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Successfully created port: 0d3638e8-403e-4555-a591-7607de7d759f {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 746.159676] env[62923]: DEBUG nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 746.327075] env[62923]: INFO nova.compute.manager [-] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Took 1.21 seconds to deallocate network for instance. [ 746.329518] env[62923]: DEBUG nova.compute.claims [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 746.329731] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.462019] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14054f1e-41f7-4963-aa50-29ee0525ddad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.469395] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a7425ae-c50b-4b45-9439-627cd5f04a58 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.501719] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5256d63c-1383-422c-a455-a62e1f1b4da6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.508856] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96b21b7-ad96-4b49-a60b-8b6cb4030d6c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.522679] env[62923]: DEBUG nova.compute.provider_tree [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.558011] env[62923]: DEBUG oslo_concurrency.lockutils [None req-28904507-50ba-44f9-b24d-8fbfdab9bbc2 tempest-ServersTestBootFromVolume-414653886 tempest-ServersTestBootFromVolume-414653886-project-member] Lock "b48be393-189f-4093-b079-fe555192e7ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 139.509s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.025867] env[62923]: DEBUG nova.scheduler.client.report [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917
tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 747.060908] env[62923]: DEBUG nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 747.170677] env[62923]: DEBUG nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 747.198009] env[62923]: DEBUG nova.virt.hardware [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 747.198260] env[62923]: DEBUG nova.virt.hardware [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 747.198416] env[62923]: DEBUG nova.virt.hardware [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.198607] env[62923]: DEBUG nova.virt.hardware [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 747.199226] env[62923]: DEBUG nova.virt.hardware [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Image pref 0:0:0 {{(pid=62923)
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.199463] env[62923]: DEBUG nova.virt.hardware [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 747.199767] env[62923]: DEBUG nova.virt.hardware [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 747.200025] env[62923]: DEBUG nova.virt.hardware [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 747.200153] env[62923]: DEBUG nova.virt.hardware [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 747.200324] env[62923]: DEBUG nova.virt.hardware [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 747.200515] env[62923]: DEBUG nova.virt.hardware [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 747.201978] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9850dda7-9624-43d4-9e84-dfba2294a8ba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.210730] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385fe3d0-209f-4c5f-993b-2d70d02fabf6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.331111] env[62923]: DEBUG nova.compute.manager [req-65a7726f-abf4-4ee0-9028-2922f37d1d51 req-046126a3-42fa-439b-b39d-75b4a4e66eec service nova] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Received event network-changed-0d3638e8-403e-4555-a591-7607de7d759f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 747.331317] env[62923]: DEBUG nova.compute.manager [req-65a7726f-abf4-4ee0-9028-2922f37d1d51 req-046126a3-42fa-439b-b39d-75b4a4e66eec service nova] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Refreshing instance network info cache due to event network-changed-0d3638e8-403e-4555-a591-7607de7d759f. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 747.331561] env[62923]: DEBUG oslo_concurrency.lockutils [req-65a7726f-abf4-4ee0-9028-2922f37d1d51 req-046126a3-42fa-439b-b39d-75b4a4e66eec service nova] Acquiring lock "refresh_cache-bf0222ef-b86f-4d85-ab75-96661b90a4b4" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.331667] env[62923]: DEBUG oslo_concurrency.lockutils [req-65a7726f-abf4-4ee0-9028-2922f37d1d51 req-046126a3-42fa-439b-b39d-75b4a4e66eec service nova] Acquired lock "refresh_cache-bf0222ef-b86f-4d85-ab75-96661b90a4b4" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.331813] env[62923]: DEBUG nova.network.neutron [req-65a7726f-abf4-4ee0-9028-2922f37d1d51 req-046126a3-42fa-439b-b39d-75b4a4e66eec service nova] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Refreshing network info cache for port 0d3638e8-403e-4555-a591-7607de7d759f {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 747.423350] env[62923]: ERROR nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 0d3638e8-403e-4555-a591-7607de7d759f, please check neutron logs for more information. [ 747.423350] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 747.423350] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 747.423350] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 747.423350] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 747.423350] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 747.423350] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 747.423350] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 747.423350] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.423350] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 747.423350] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.423350] env[62923]: ERROR nova.compute.manager raise self.value [ 747.423350] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 747.423350] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 747.423350] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.423350] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 747.423803] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.423803] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 747.423803] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 0d3638e8-403e-4555-a591-7607de7d759f, please check neutron logs for more information. [ 747.423803] env[62923]: ERROR nova.compute.manager [ 747.423803] env[62923]: Traceback (most recent call last): [ 747.423803] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 747.423803] env[62923]: listener.cb(fileno) [ 747.423803] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 747.423803] env[62923]: result = function(*args, **kwargs) [ 747.423803] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 747.423803] env[62923]: return func(*args, **kwargs) [ 747.423803] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 747.423803] env[62923]: raise e [ 747.423803] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 747.423803] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 747.423803] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 747.423803] env[62923]: created_port_ids = self._update_ports_for_instance( [ 747.423803] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 747.423803] env[62923]: with excutils.save_and_reraise_exception(): [ 747.423803] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.423803] env[62923]: self.force_reraise() [ 747.423803] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.423803] env[62923]: raise self.value [ 747.423803] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 747.423803] env[62923]: updated_port = self._update_port( [ 747.423803] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.423803] env[62923]: _ensure_no_port_binding_failure(port) [ 747.423803] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.423803] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 747.424634] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 0d3638e8-403e-4555-a591-7607de7d759f, please check neutron logs for more information. [ 747.424634] env[62923]: Removing descriptor: 21 [ 747.424910] env[62923]: ERROR nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 0d3638e8-403e-4555-a591-7607de7d759f, please check neutron logs for more information. 
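Every PortBindingFailed dump in this log originates from the same small check: after updating a port, Nova inspects the binding:vif_type that Neutron reported on it and raises when it comes back as binding_failed. The following is a minimal standalone Python sketch of that pattern with simplified names (the real check is _ensure_no_port_binding_failure in nova/network/neutron.py, and the real exception class lives in nova/exception.py):

    # Minimal sketch of the binding-failure check seen in the tracebacks above.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, please "
                             f"check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Neutron signals a failed binding through the port's
        # binding:vif_type attribute rather than through an API error,
        # so the caller has to inspect the updated port explicitly.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        _ensure_no_port_binding_failure({
            'id': 'e410cdcf-56e9-4a0b-b47c-c855b5d11d1e',
            'binding:vif_type': 'binding_failed',
        })
    except PortBindingFailed as exc:
        print(exc)

This also explains why the failure surfaces so late: the port is created successfully, and only the subsequent update reveals that no mechanism driver could bind it on this host.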
[ 747.424910] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Traceback (most recent call last): [ 747.424910] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 747.424910] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] yield resources [ 747.424910] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 747.424910] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] self.driver.spawn(context, instance, image_meta, [ 747.424910] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 747.424910] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 747.424910] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 747.424910] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] vm_ref = self.build_virtual_machine(instance, [ 747.424910] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 747.425455] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 747.425455] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 747.425455] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] for vif in network_info: [ 747.425455] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 747.425455] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] return self._sync_wrapper(fn, *args, **kwargs) [ 747.425455] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 747.425455] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] self.wait() [ 747.425455] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 747.425455] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] self[:] = self._gt.wait() [ 747.425455] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 747.425455] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] return self._exit_event.wait() [ 747.425455] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 747.425455] env[62923]: ERROR 
nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] result = hub.switch() [ 747.425957] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 747.425957] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] return self.greenlet.switch() [ 747.425957] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 747.425957] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] result = function(*args, **kwargs) [ 747.425957] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 747.425957] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] return func(*args, **kwargs) [ 747.425957] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 747.425957] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] raise e [ 747.425957] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 747.425957] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] nwinfo = self.network_api.allocate_for_instance( [ 747.425957] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 747.425957] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] created_port_ids = self._update_ports_for_instance( [ 747.425957] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 747.426475] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] with excutils.save_and_reraise_exception(): [ 747.426475] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.426475] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] self.force_reraise() [ 747.426475] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.426475] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] raise self.value [ 747.426475] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 747.426475] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] updated_port = self._update_port( [ 747.426475] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.426475] 
env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] _ensure_no_port_binding_failure(port) [ 747.426475] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.426475] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] raise exception.PortBindingFailed(port_id=port['id']) [ 747.426475] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] nova.exception.PortBindingFailed: Binding failed for port 0d3638e8-403e-4555-a591-7607de7d759f, please check neutron logs for more information. [ 747.426475] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] [ 747.427007] env[62923]: INFO nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Terminating instance [ 747.427754] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Acquiring lock "refresh_cache-bf0222ef-b86f-4d85-ab75-96661b90a4b4" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.470292] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.470688] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.531362] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.387s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.531876] env[62923]: DEBUG nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Start building networks asynchronously for instance.
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 747.534494] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.708s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.537756] env[62923]: INFO nova.compute.claims [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 747.591410] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.868065] env[62923]: DEBUG nova.network.neutron [req-65a7726f-abf4-4ee0-9028-2922f37d1d51 req-046126a3-42fa-439b-b39d-75b4a4e66eec service nova] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.009033] env[62923]: DEBUG nova.network.neutron [req-65a7726f-abf4-4ee0-9028-2922f37d1d51 req-046126a3-42fa-439b-b39d-75b4a4e66eec service nova] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.042027] env[62923]: DEBUG nova.compute.utils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 748.043201] env[62923]: DEBUG nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 748.043363] env[62923]: DEBUG nova.network.neutron [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 748.127658] env[62923]: DEBUG nova.policy [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c654b8365f5543f3bf713f3f5aa00654', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a654d46357ed49cd95460a56926f102a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 748.514021] env[62923]: DEBUG oslo_concurrency.lockutils [req-65a7726f-abf4-4ee0-9028-2922f37d1d51 req-046126a3-42fa-439b-b39d-75b4a4e66eec service nova] Releasing lock "refresh_cache-bf0222ef-b86f-4d85-ab75-96661b90a4b4" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.514021] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Acquired lock "refresh_cache-bf0222ef-b86f-4d85-ab75-96661b90a4b4" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.514021] env[62923]: DEBUG nova.network.neutron [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.547107] env[62923]: DEBUG nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 748.873062] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1ba213-b4b3-41d6-8ea2-532b162cdab3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.880809] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe0de21-39d7-4fa2-a568-662ddbc994ad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.913231] env[62923]: DEBUG nova.network.neutron [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Successfully created port: 44334227-49c0-4835-986e-235c82f82d3d {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.915591] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8341022b-2644-43e8-8bd8-a3b8b9d726ee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.923278] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b81fde-ce49-4ea3-866f-ac87fb578813 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.938763] env[62923]: DEBUG nova.compute.provider_tree [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.048138] env[62923]: DEBUG nova.network.neutron [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.212281] env[62923]: DEBUG nova.network.neutron [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.378274] env[62923]: DEBUG nova.compute.manager [req-625b6d93-8da9-4bc0-826e-dbf110db78e6 req-42f52e83-2462-4ec9-b788-bc4c54ff60d8 service nova] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Received event network-vif-deleted-0d3638e8-403e-4555-a591-7607de7d759f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 749.442057] env[62923]: DEBUG nova.scheduler.client.report [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 749.569557] env[62923]: DEBUG nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 749.605224] env[62923]: DEBUG nova.virt.hardware [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 749.605224] env[62923]: DEBUG nova.virt.hardware [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 749.605224] env[62923]: DEBUG nova.virt.hardware [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.605446] env[62923]: DEBUG nova.virt.hardware [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 749.605446] env[62923]: DEBUG nova.virt.hardware [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.606728] env[62923]: DEBUG nova.virt.hardware [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 749.606728] env[62923]: DEBUG nova.virt.hardware [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 749.606728] env[62923]: DEBUG nova.virt.hardware [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 749.606728] env[62923]: DEBUG nova.virt.hardware [None
req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 749.607217] env[62923]: DEBUG nova.virt.hardware [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 749.611027] env[62923]: DEBUG nova.virt.hardware [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 749.611027] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7459536-9225-4322-9c5d-3efa8e88ed8a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.625320] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf88981-1727-410d-b749-e05024bcff2c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.716726] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Releasing lock "refresh_cache-bf0222ef-b86f-4d85-ab75-96661b90a4b4" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.717455] env[62923]: DEBUG nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 749.717803] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 749.718247] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d3383d20-cf17-4426-b3e0-8b9f4d31b68f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.730199] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f6ffca-7653-44fa-a30a-348b7b8bbfa4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.755981] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bf0222ef-b86f-4d85-ab75-96661b90a4b4 could not be found. 
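Every one of the tracebacks above funnels through oslo_utils.excutils.save_and_reraise_exception, which is why the __exit__, force_reraise() and raise self.value frames recur in each dump: the context manager captures the in-flight exception, lets cleanup code run, then re-raises the original. A minimal runnable sketch of the pattern follows; the _rollback helper and the simulated failure are hypothetical, for illustration only:

    from oslo_utils import excutils

    def _rollback(created_ports):
        # Hypothetical cleanup helper, not part of Nova.
        for port_id in created_ports:
            print(f"rolling back port {port_id}")

    def update_ports(created_ports):
        try:
            raise RuntimeError("simulated port update failure")
        except Exception:
            # The context manager stores the active exception, runs the
            # cleanup in its body, and re-raises the original exception
            # (with its traceback) when the with-block exits -- producing
            # the force_reraise()/raise self.value frames seen above.
            with excutils.save_and_reraise_exception():
                _rollback(created_ports)

The design choice this reflects: cleanup after a failed allocation must not swallow or replace the root-cause exception, so the caller still sees PortBindingFailed rather than whatever the rollback path might raise or return.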
[ 749.756599] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 749.756599] env[62923]: INFO nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 749.756743] env[62923]: DEBUG oslo.service.loopingcall [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 749.757044] env[62923]: DEBUG nova.compute.manager [-] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 749.757182] env[62923]: DEBUG nova.network.neutron [-] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 749.776539] env[62923]: DEBUG nova.network.neutron [-] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.951022] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.951022] env[62923]: DEBUG nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 749.951727] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.301s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.953281] env[62923]: INFO nova.compute.claims [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 750.220679] env[62923]: ERROR nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 44334227-49c0-4835-986e-235c82f82d3d, please check neutron logs for more information. [ 750.220679] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 750.220679] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 750.220679] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 750.220679] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 750.220679] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 750.220679] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 750.220679] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 750.220679] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 750.220679] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 750.220679] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 750.220679] env[62923]: ERROR nova.compute.manager raise self.value [ 750.220679] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 750.220679] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 750.220679] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 750.220679] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 750.221389] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 750.221389] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 750.221389] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 44334227-49c0-4835-986e-235c82f82d3d, please check neutron logs for more information. 
[ 750.221389] env[62923]: ERROR nova.compute.manager [ 750.221389] env[62923]: Traceback (most recent call last): [ 750.221389] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 750.221389] env[62923]: listener.cb(fileno) [ 750.221389] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 750.221389] env[62923]: result = function(*args, **kwargs) [ 750.221389] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 750.221389] env[62923]: return func(*args, **kwargs) [ 750.221389] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 750.221389] env[62923]: raise e [ 750.221389] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 750.221389] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 750.221389] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 750.221389] env[62923]: created_port_ids = self._update_ports_for_instance( [ 750.221389] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 750.221389] env[62923]: with excutils.save_and_reraise_exception(): [ 750.221389] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 750.221389] env[62923]: self.force_reraise() [ 750.221389] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 750.221389] env[62923]: raise self.value [ 750.221389] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 750.221389] env[62923]: updated_port = self._update_port( [ 750.221389] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 750.221389] env[62923]: _ensure_no_port_binding_failure(port) [ 750.221389] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 750.221389] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 750.222385] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 44334227-49c0-4835-986e-235c82f82d3d, please check neutron logs for more information. [ 750.222385] env[62923]: Removing descriptor: 21 [ 750.222385] env[62923]: ERROR nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 44334227-49c0-4835-986e-235c82f82d3d, please check neutron logs for more information. 
[ 750.222385] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Traceback (most recent call last): [ 750.222385] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 750.222385] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] yield resources [ 750.222385] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 750.222385] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] self.driver.spawn(context, instance, image_meta, [ 750.222385] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 750.222385] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 750.222385] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 750.222385] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] vm_ref = self.build_virtual_machine(instance, [ 750.222748] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 750.222748] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] vif_infos = vmwarevif.get_vif_info(self._session, [ 750.222748] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 750.222748] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] for vif in network_info: [ 750.222748] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 750.222748] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] return self._sync_wrapper(fn, *args, **kwargs) [ 750.222748] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 750.222748] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] self.wait() [ 750.222748] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 750.222748] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] self[:] = self._gt.wait() [ 750.222748] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 750.222748] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] return self._exit_event.wait() [ 750.222748] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 750.223191] env[62923]: ERROR 
nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] result = hub.switch() [ 750.223191] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 750.223191] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] return self.greenlet.switch() [ 750.223191] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 750.223191] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] result = function(*args, **kwargs) [ 750.223191] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 750.223191] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] return func(*args, **kwargs) [ 750.223191] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 750.223191] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] raise e [ 750.223191] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 750.223191] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] nwinfo = self.network_api.allocate_for_instance( [ 750.223191] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 750.223191] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] created_port_ids = self._update_ports_for_instance( [ 750.223618] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 750.223618] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] with excutils.save_and_reraise_exception(): [ 750.223618] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 750.223618] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] self.force_reraise() [ 750.223618] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 750.223618] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] raise self.value [ 750.223618] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 750.223618] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] updated_port = self._update_port( [ 750.223618] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 750.223618] 
env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] _ensure_no_port_binding_failure(port) [ 750.223618] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 750.223618] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] raise exception.PortBindingFailed(port_id=port['id']) [ 750.224371] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] nova.exception.PortBindingFailed: Binding failed for port 44334227-49c0-4835-986e-235c82f82d3d, please check neutron logs for more information. [ 750.224371] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] [ 750.224371] env[62923]: INFO nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Terminating instance [ 750.224371] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "refresh_cache-ac14f710-41c0-429c-92a3-46acceace3fc" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.224371] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "refresh_cache-ac14f710-41c0-429c-92a3-46acceace3fc" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.224371] env[62923]: DEBUG nova.network.neutron [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.279448] env[62923]: DEBUG nova.network.neutron [-] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.458151] env[62923]: DEBUG nova.compute.utils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 750.462286] env[62923]: DEBUG nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 750.462286] env[62923]: DEBUG nova.network.neutron [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 750.543509] env[62923]: DEBUG nova.policy [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7f51867bb77848c6b905238787450f2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '138722e5c02a4a03b482220725bb30c3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 750.761637] env[62923]: DEBUG nova.network.neutron [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.785565] env[62923]: INFO nova.compute.manager [-] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Took 1.03 seconds to deallocate network for instance. [ 750.789332] env[62923]: DEBUG nova.compute.claims [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 750.789509] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.923926] env[62923]: DEBUG nova.network.neutron [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.963073] env[62923]: DEBUG nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 751.019184] env[62923]: DEBUG nova.network.neutron [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Successfully created port: b7b683c3-cbd9-4f68-a369-ca8feccb9ba7 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 751.359799] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3617be3c-8a99-41d5-aa45-07ec9a7f18a6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.371266] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018b1a25-4835-42ab-a063-9c40879bf3da {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.411833] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6120c40b-c63e-4df2-a871-6ac7c05d40af {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.415431] env[62923]: DEBUG nova.compute.manager [req-5780f4e7-fd11-4cc9-b881-98398fa1f6ca req-ccb3e15e-30c8-42fe-badc-76a0c206bdbe service nova] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Received event network-changed-44334227-49c0-4835-986e-235c82f82d3d {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 751.415608] env[62923]: DEBUG nova.compute.manager [req-5780f4e7-fd11-4cc9-b881-98398fa1f6ca req-ccb3e15e-30c8-42fe-badc-76a0c206bdbe service nova] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Refreshing instance network info cache due to event network-changed-44334227-49c0-4835-986e-235c82f82d3d. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 751.415801] env[62923]: DEBUG oslo_concurrency.lockutils [req-5780f4e7-fd11-4cc9-b881-98398fa1f6ca req-ccb3e15e-30c8-42fe-badc-76a0c206bdbe service nova] Acquiring lock "refresh_cache-ac14f710-41c0-429c-92a3-46acceace3fc" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.424283] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1d2746-fd1c-4533-bdac-edf868495ab3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.426142] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "refresh_cache-ac14f710-41c0-429c-92a3-46acceace3fc" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.426536] env[62923]: DEBUG nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 751.426712] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 751.426981] env[62923]: DEBUG oslo_concurrency.lockutils [req-5780f4e7-fd11-4cc9-b881-98398fa1f6ca req-ccb3e15e-30c8-42fe-badc-76a0c206bdbe service nova] Acquired lock "refresh_cache-ac14f710-41c0-429c-92a3-46acceace3fc" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.427153] env[62923]: DEBUG nova.network.neutron [req-5780f4e7-fd11-4cc9-b881-98398fa1f6ca req-ccb3e15e-30c8-42fe-badc-76a0c206bdbe service nova] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Refreshing network info cache for port 44334227-49c0-4835-986e-235c82f82d3d {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.428332] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f706213-dd05-4c44-b7c3-e44f8a74732d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.441020] env[62923]: DEBUG nova.compute.provider_tree [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.446592] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f67393-a957-451c-beef-33fdd8a1f9c5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.468959] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ac14f710-41c0-429c-92a3-46acceace3fc could not be found. [ 751.469162] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.469375] env[62923]: INFO nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Took 0.04 seconds to destroy the instance on the hypervisor. [ 751.469566] env[62923]: DEBUG oslo.service.loopingcall [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.472713] env[62923]: DEBUG nova.compute.manager [-] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 751.472806] env[62923]: DEBUG nova.network.neutron [-] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 751.496584] env[62923]: DEBUG nova.network.neutron [-] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.942761] env[62923]: DEBUG nova.scheduler.client.report [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 751.951445] env[62923]: DEBUG nova.network.neutron [req-5780f4e7-fd11-4cc9-b881-98398fa1f6ca req-ccb3e15e-30c8-42fe-badc-76a0c206bdbe service nova] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.976188] env[62923]: DEBUG nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 751.999075] env[62923]: DEBUG nova.network.neutron [-] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.013188] env[62923]: DEBUG nova.virt.hardware [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 752.013461] env[62923]: DEBUG nova.virt.hardware [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 752.013578] env[62923]: DEBUG nova.virt.hardware [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 752.013791] env[62923]: DEBUG nova.virt.hardware [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 752.013902] env[62923]: DEBUG nova.virt.hardware [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 752.015087] env[62923]: DEBUG nova.virt.hardware [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 752.015394] env[62923]: DEBUG nova.virt.hardware [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 752.015678] env[62923]: DEBUG nova.virt.hardware [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 
tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 752.015752] env[62923]: DEBUG nova.virt.hardware [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 752.015889] env[62923]: DEBUG nova.virt.hardware [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 752.016075] env[62923]: DEBUG nova.virt.hardware [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 752.016941] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b329354c-4dbc-4b7b-83a8-092f1934ce2e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.030566] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b2c671-33f8-4da0-b37a-cf54efc1ba43 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.098953] env[62923]: DEBUG nova.network.neutron [req-5780f4e7-fd11-4cc9-b881-98398fa1f6ca req-ccb3e15e-30c8-42fe-badc-76a0c206bdbe service nova] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.267639] env[62923]: ERROR nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b7b683c3-cbd9-4f68-a369-ca8feccb9ba7, please check neutron logs for more information. 
[ 752.267639] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 752.267639] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 752.267639] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 752.267639] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 752.267639] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 752.267639] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 752.267639] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 752.267639] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 752.267639] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 752.267639] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 752.267639] env[62923]: ERROR nova.compute.manager raise self.value [ 752.267639] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 752.267639] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 752.267639] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 752.267639] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 752.268240] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 752.268240] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 752.268240] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b7b683c3-cbd9-4f68-a369-ca8feccb9ba7, please check neutron logs for more information. 
[ 752.268240] env[62923]: ERROR nova.compute.manager [ 752.269020] env[62923]: Traceback (most recent call last): [ 752.269113] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 752.269113] env[62923]: listener.cb(fileno) [ 752.269113] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 752.269113] env[62923]: result = function(*args, **kwargs) [ 752.269113] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 752.269113] env[62923]: return func(*args, **kwargs) [ 752.269113] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 752.269113] env[62923]: raise e [ 752.269113] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 752.269113] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 752.269113] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 752.269113] env[62923]: created_port_ids = self._update_ports_for_instance( [ 752.269113] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 752.269113] env[62923]: with excutils.save_and_reraise_exception(): [ 752.269113] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 752.269113] env[62923]: self.force_reraise() [ 752.269113] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 752.269113] env[62923]: raise self.value [ 752.269113] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 752.269113] env[62923]: updated_port = self._update_port( [ 752.269113] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 752.269113] env[62923]: _ensure_no_port_binding_failure(port) [ 752.269113] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 752.269113] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 752.269113] env[62923]: nova.exception.PortBindingFailed: Binding failed for port b7b683c3-cbd9-4f68-a369-ca8feccb9ba7, please check neutron logs for more information. [ 752.269113] env[62923]: Removing descriptor: 21 [ 752.270515] env[62923]: ERROR nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b7b683c3-cbd9-4f68-a369-ca8feccb9ba7, please check neutron logs for more information. 
[ 752.270515] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Traceback (most recent call last): [ 752.270515] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 752.270515] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] yield resources [ 752.270515] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 752.270515] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] self.driver.spawn(context, instance, image_meta, [ 752.270515] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 752.270515] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 752.270515] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 752.270515] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] vm_ref = self.build_virtual_machine(instance, [ 752.270515] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 752.270898] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] vif_infos = vmwarevif.get_vif_info(self._session, [ 752.270898] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 752.270898] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] for vif in network_info: [ 752.270898] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 752.270898] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] return self._sync_wrapper(fn, *args, **kwargs) [ 752.270898] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 752.270898] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] self.wait() [ 752.270898] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 752.270898] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] self[:] = self._gt.wait() [ 752.270898] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 752.270898] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] return self._exit_event.wait() [ 752.270898] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 752.270898] env[62923]: ERROR 
nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] result = hub.switch() [ 752.271298] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 752.271298] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] return self.greenlet.switch() [ 752.271298] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 752.271298] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] result = function(*args, **kwargs) [ 752.271298] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 752.271298] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] return func(*args, **kwargs) [ 752.271298] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 752.271298] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] raise e [ 752.271298] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 752.271298] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] nwinfo = self.network_api.allocate_for_instance( [ 752.271298] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 752.271298] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] created_port_ids = self._update_ports_for_instance( [ 752.271298] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 752.271658] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] with excutils.save_and_reraise_exception(): [ 752.271658] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 752.271658] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] self.force_reraise() [ 752.271658] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 752.271658] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] raise self.value [ 752.271658] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 752.271658] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] updated_port = self._update_port( [ 752.271658] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 752.271658] 
env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] _ensure_no_port_binding_failure(port) [ 752.271658] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 752.271658] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] raise exception.PortBindingFailed(port_id=port['id']) [ 752.271658] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] nova.exception.PortBindingFailed: Binding failed for port b7b683c3-cbd9-4f68-a369-ca8feccb9ba7, please check neutron logs for more information. [ 752.271658] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] [ 752.272044] env[62923]: INFO nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Terminating instance [ 752.272820] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Acquiring lock "refresh_cache-78daba16-0c0f-4db6-bde1-70d960a6e7ae" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.272981] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Acquired lock "refresh_cache-78daba16-0c0f-4db6-bde1-70d960a6e7ae" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.273169] env[62923]: DEBUG nova.network.neutron [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 752.448331] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.449070] env[62923]: DEBUG nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 752.453058] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.560s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.507490] env[62923]: INFO nova.compute.manager [-] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Took 1.03 seconds to deallocate network for instance. [ 752.509843] env[62923]: DEBUG nova.compute.claims [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 752.510045] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.601892] env[62923]: DEBUG oslo_concurrency.lockutils [req-5780f4e7-fd11-4cc9-b881-98398fa1f6ca req-ccb3e15e-30c8-42fe-badc-76a0c206bdbe service nova] Releasing lock "refresh_cache-ac14f710-41c0-429c-92a3-46acceace3fc" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.602270] env[62923]: DEBUG nova.compute.manager [req-5780f4e7-fd11-4cc9-b881-98398fa1f6ca req-ccb3e15e-30c8-42fe-badc-76a0c206bdbe service nova] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Received event network-vif-deleted-44334227-49c0-4835-986e-235c82f82d3d {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 752.801071] env[62923]: DEBUG nova.network.neutron [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 752.954515] env[62923]: DEBUG nova.network.neutron [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.962276] env[62923]: DEBUG nova.compute.utils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 752.964027] env[62923]: DEBUG nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 752.965406] env[62923]: DEBUG nova.network.neutron [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 753.017453] env[62923]: DEBUG nova.policy [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce8ba7cae2944508952117f51934c930', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bd6d898b4dc54b9f9eb4e198bdadfc40', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 753.464835] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Releasing lock "refresh_cache-78daba16-0c0f-4db6-bde1-70d960a6e7ae" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.465180] env[62923]: DEBUG nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 753.467580] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 753.467580] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cde0d2ce-d474-47c2-b74c-33bb25424c0e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.477819] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69895e44-7d46-49a2-b943-d24045d7fb38 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.494396] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance a701b2b9-10df-4ba3-8b78-b6b486d8f1db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.494396] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance fcee63c5-eaa3-4d8c-a612-9c30087433e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.494396] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 9b334c72-11f5-4165-a350-09fe5487a9a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.494574] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance bf0222ef-b86f-4d85-ab75-96661b90a4b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.494574] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance ac14f710-41c0-429c-92a3-46acceace3fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.495561] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 78daba16-0c0f-4db6-bde1-70d960a6e7ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.495561] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 7831bfb8-b336-4338-923f-c759a5c67c06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.497030] env[62923]: DEBUG nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 753.512250] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 78daba16-0c0f-4db6-bde1-70d960a6e7ae could not be found. 
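The WARNING at the end of the record above shows the vmwareapi destroy path tolerating a VM that was never created on the backend: InstanceNotFound from the lookup is caught, logged, and teardown continues. A minimal sketch of that tolerate-missing pattern, with illustrative names only and a dict standing in for the vCenter SearchIndex lookup (this is not Nova's actual code):

```python
# Sketch of the tolerate-missing-instance destroy pattern seen in the WARNING
# above: InstanceNotFound is swallowed so network deallocation and claim abort
# can still run. All helpers here are hypothetical stand-ins.

class InstanceNotFound(Exception):
    pass


def find_vm_ref(backend, instance_uuid):
    """Stand-in for SearchIndex.FindAllByUuid: map instance UUID -> VM ref."""
    try:
        return backend[instance_uuid]
    except KeyError:
        raise InstanceNotFound(instance_uuid)


def destroy_instance(backend, instance_uuid):
    try:
        vm_ref = find_vm_ref(backend, instance_uuid)
    except InstanceNotFound:
        # Mirrors the log: a WARNING, then "Instance destroyed" anyway.
        print("WARNING: Instance %s could not be found." % instance_uuid)
        return
    del backend[instance_uuid]
    print("Destroyed %s (%s)" % (instance_uuid, vm_ref))


destroy_instance({}, "78daba16-0c0f-4db6-bde1-70d960a6e7ae")
```

Treating "not found" as "already destroyed" keeps the teardown idempotent, which matters here because the build failed before any VM was created on the hypervisor.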
[ 753.512250] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 753.512481] env[62923]: INFO nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Took 0.05 seconds to destroy the instance on the hypervisor. [ 753.513061] env[62923]: DEBUG oslo.service.loopingcall [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 753.514944] env[62923]: DEBUG nova.compute.manager [-] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 753.515050] env[62923]: DEBUG nova.network.neutron [-] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 753.517820] env[62923]: DEBUG nova.compute.manager [req-6a907ba8-c124-427b-9d96-46204b965274 req-3240fdac-2037-4d1d-82fa-c021e69de607 service nova] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Received event network-changed-b7b683c3-cbd9-4f68-a369-ca8feccb9ba7 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 753.518068] env[62923]: DEBUG nova.compute.manager [req-6a907ba8-c124-427b-9d96-46204b965274 req-3240fdac-2037-4d1d-82fa-c021e69de607 service nova] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Refreshing instance network info cache due to event network-changed-b7b683c3-cbd9-4f68-a369-ca8feccb9ba7. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 753.518215] env[62923]: DEBUG oslo_concurrency.lockutils [req-6a907ba8-c124-427b-9d96-46204b965274 req-3240fdac-2037-4d1d-82fa-c021e69de607 service nova] Acquiring lock "refresh_cache-78daba16-0c0f-4db6-bde1-70d960a6e7ae" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.518365] env[62923]: DEBUG oslo_concurrency.lockutils [req-6a907ba8-c124-427b-9d96-46204b965274 req-3240fdac-2037-4d1d-82fa-c021e69de607 service nova] Acquired lock "refresh_cache-78daba16-0c0f-4db6-bde1-70d960a6e7ae" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.518577] env[62923]: DEBUG nova.network.neutron [req-6a907ba8-c124-427b-9d96-46204b965274 req-3240fdac-2037-4d1d-82fa-c021e69de607 service nova] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Refreshing network info cache for port b7b683c3-cbd9-4f68-a369-ca8feccb9ba7 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 753.558756] env[62923]: DEBUG nova.network.neutron [-] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Instance cache missing network info.
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 753.634079] env[62923]: DEBUG nova.network.neutron [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Successfully created port: db91e8e4-4ec7-42cf-a81e-46717ae18c5b {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 753.654303] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Acquiring lock "6cf594e3-e4a6-45f5-b8d2-06db1c200042" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.655067] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Lock "6cf594e3-e4a6-45f5-b8d2-06db1c200042" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.001156] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance c22aa745-0e4a-40fd-903f-edba79cbf88b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 754.042084] env[62923]: DEBUG nova.network.neutron [req-6a907ba8-c124-427b-9d96-46204b965274 req-3240fdac-2037-4d1d-82fa-c021e69de607 service nova] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.062154] env[62923]: DEBUG nova.network.neutron [-] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.192917] env[62923]: DEBUG nova.network.neutron [req-6a907ba8-c124-427b-9d96-46204b965274 req-3240fdac-2037-4d1d-82fa-c021e69de607 service nova] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.506854] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 71dd8aff-4500-4c91-8a46-2a398fd03560 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 754.511178] env[62923]: DEBUG nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 754.547386] env[62923]: DEBUG nova.virt.hardware [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 754.547628] env[62923]: DEBUG nova.virt.hardware [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 754.547779] env[62923]: DEBUG nova.virt.hardware [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 754.547956] env[62923]: DEBUG nova.virt.hardware [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 754.548113] env[62923]: DEBUG nova.virt.hardware [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 754.548258] env[62923]: DEBUG nova.virt.hardware [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 754.548457] env[62923]: DEBUG nova.virt.hardware [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0),
maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 754.548613] env[62923]: DEBUG nova.virt.hardware [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 754.548771] env[62923]: DEBUG nova.virt.hardware [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 754.548927] env[62923]: DEBUG nova.virt.hardware [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 754.549105] env[62923]: DEBUG nova.virt.hardware [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 754.549957] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f4d6aae-64ee-4e4c-b0f8-5f5816f97594 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.559647] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e811065c-6304-476f-8ac9-d739aafe35b6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.564880] env[62923]: INFO nova.compute.manager [-] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Took 1.05 seconds to deallocate network for instance. 
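The nova.virt.hardware records above trace _get_desirable_cpu_topologies: with no flavor or image limits, the sockets/cores/threads caps default to 65536 each, and the only factorization of 1 vCPU is 1:1:1, so exactly one topology is possible. A rough sketch of that enumeration under stated assumptions (a simplified VirtCPUTopology namedtuple, no preference sorting; not nova/virt/hardware.py itself):

```python
# Enumerate every sockets*cores*threads factorization of a vCPU count that
# fits within the limits, mirroring the "Got 1 possible topologies" trace.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies


# 1 vCPU -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], as in the log.
print(possible_topologies(1))
```

For a 4-vCPU flavor the same sketch yields six factorizations, which is why the single-vCPU m1.nano flavor here collapses to exactly one.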
[ 754.566952] env[62923]: DEBUG nova.compute.claims [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 754.567147] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.697757] env[62923]: DEBUG oslo_concurrency.lockutils [req-6a907ba8-c124-427b-9d96-46204b965274 req-3240fdac-2037-4d1d-82fa-c021e69de607 service nova] Releasing lock "refresh_cache-78daba16-0c0f-4db6-bde1-70d960a6e7ae" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.698036] env[62923]: DEBUG nova.compute.manager [req-6a907ba8-c124-427b-9d96-46204b965274 req-3240fdac-2037-4d1d-82fa-c021e69de607 service nova] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Received event network-vif-deleted-b7b683c3-cbd9-4f68-a369-ca8feccb9ba7 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 754.773878] env[62923]: ERROR nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port db91e8e4-4ec7-42cf-a81e-46717ae18c5b, please check neutron logs for more information. 
[ 754.773878] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 754.773878] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 754.773878] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 754.773878] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 754.773878] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 754.773878] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 754.773878] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 754.773878] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 754.773878] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 754.773878] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 754.773878] env[62923]: ERROR nova.compute.manager raise self.value [ 754.773878] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 754.773878] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 754.773878] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 754.773878] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 754.774485] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 754.774485] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 754.774485] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port db91e8e4-4ec7-42cf-a81e-46717ae18c5b, please check neutron logs for more information. 
[ 754.774485] env[62923]: ERROR nova.compute.manager [ 754.774485] env[62923]: Traceback (most recent call last): [ 754.774485] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 754.774485] env[62923]: listener.cb(fileno) [ 754.774485] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 754.774485] env[62923]: result = function(*args, **kwargs) [ 754.774485] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 754.774485] env[62923]: return func(*args, **kwargs) [ 754.774485] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 754.774485] env[62923]: raise e [ 754.774485] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 754.774485] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 754.774485] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 754.774485] env[62923]: created_port_ids = self._update_ports_for_instance( [ 754.774485] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 754.774485] env[62923]: with excutils.save_and_reraise_exception(): [ 754.774485] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 754.774485] env[62923]: self.force_reraise() [ 754.774485] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 754.774485] env[62923]: raise self.value [ 754.774485] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 754.774485] env[62923]: updated_port = self._update_port( [ 754.774485] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 754.774485] env[62923]: _ensure_no_port_binding_failure(port) [ 754.774485] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 754.774485] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 754.775270] env[62923]: nova.exception.PortBindingFailed: Binding failed for port db91e8e4-4ec7-42cf-a81e-46717ae18c5b, please check neutron logs for more information. [ 754.775270] env[62923]: Removing descriptor: 21 [ 754.775270] env[62923]: ERROR nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port db91e8e4-4ec7-42cf-a81e-46717ae18c5b, please check neutron logs for more information. 
[ 754.775270] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Traceback (most recent call last): [ 754.775270] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 754.775270] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] yield resources [ 754.775270] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 754.775270] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] self.driver.spawn(context, instance, image_meta, [ 754.775270] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 754.775270] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] self._vmops.spawn(context, instance, image_meta, injected_files, [ 754.775270] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 754.775270] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] vm_ref = self.build_virtual_machine(instance, [ 754.775608] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 754.775608] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] vif_infos = vmwarevif.get_vif_info(self._session, [ 754.775608] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 754.775608] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] for vif in network_info: [ 754.775608] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 754.775608] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] return self._sync_wrapper(fn, *args, **kwargs) [ 754.775608] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 754.775608] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] self.wait() [ 754.775608] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 754.775608] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] self[:] = self._gt.wait() [ 754.775608] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 754.775608] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] return self._exit_event.wait() [ 754.775608] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 754.775966] env[62923]: ERROR 
nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] result = hub.switch() [ 754.775966] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 754.775966] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] return self.greenlet.switch() [ 754.775966] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 754.775966] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] result = function(*args, **kwargs) [ 754.775966] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 754.775966] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] return func(*args, **kwargs) [ 754.775966] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 754.775966] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] raise e [ 754.775966] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 754.775966] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] nwinfo = self.network_api.allocate_for_instance( [ 754.775966] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 754.775966] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] created_port_ids = self._update_ports_for_instance( [ 754.776334] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 754.776334] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] with excutils.save_and_reraise_exception(): [ 754.776334] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 754.776334] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] self.force_reraise() [ 754.776334] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 754.776334] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] raise self.value [ 754.776334] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 754.776334] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] updated_port = self._update_port( [ 754.776334] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 754.776334] 
env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] _ensure_no_port_binding_failure(port) [ 754.776334] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 754.776334] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] raise exception.PortBindingFailed(port_id=port['id']) [ 754.776691] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] nova.exception.PortBindingFailed: Binding failed for port db91e8e4-4ec7-42cf-a81e-46717ae18c5b, please check neutron logs for more information. [ 754.776691] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] [ 754.776691] env[62923]: INFO nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Terminating instance [ 754.777316] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Acquiring lock "refresh_cache-7831bfb8-b336-4338-923f-c759a5c67c06" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.777316] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Acquired lock "refresh_cache-7831bfb8-b336-4338-923f-c759a5c67c06" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.777437] env[62923]: DEBUG nova.network.neutron [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 755.013926] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance d65ce1f0-e9de-4fc8-828b-95aec5615f95 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 755.294838] env[62923]: DEBUG nova.network.neutron [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 755.387844] env[62923]: DEBUG nova.network.neutron [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.517474] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance f76d2304-7a4e-4f18-80de-ecb0b67bec28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 755.575723] env[62923]: DEBUG nova.compute.manager [req-779bda46-3e29-4b00-9dd3-6d871c0e8b25 req-4d7f054e-c4b8-4f17-bade-639ad82d6a03 service nova] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Received event network-changed-db91e8e4-4ec7-42cf-a81e-46717ae18c5b {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 755.575919] env[62923]: DEBUG nova.compute.manager [req-779bda46-3e29-4b00-9dd3-6d871c0e8b25 req-4d7f054e-c4b8-4f17-bade-639ad82d6a03 service nova] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Refreshing instance network info cache due to event network-changed-db91e8e4-4ec7-42cf-a81e-46717ae18c5b. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 755.576121] env[62923]: DEBUG oslo_concurrency.lockutils [req-779bda46-3e29-4b00-9dd3-6d871c0e8b25 req-4d7f054e-c4b8-4f17-bade-639ad82d6a03 service nova] Acquiring lock "refresh_cache-7831bfb8-b336-4338-923f-c759a5c67c06" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.890868] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Releasing lock "refresh_cache-7831bfb8-b336-4338-923f-c759a5c67c06" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.891337] env[62923]: DEBUG nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 755.891536] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 755.891847] env[62923]: DEBUG oslo_concurrency.lockutils [req-779bda46-3e29-4b00-9dd3-6d871c0e8b25 req-4d7f054e-c4b8-4f17-bade-639ad82d6a03 service nova] Acquired lock "refresh_cache-7831bfb8-b336-4338-923f-c759a5c67c06" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.892017] env[62923]: DEBUG nova.network.neutron [req-779bda46-3e29-4b00-9dd3-6d871c0e8b25 req-4d7f054e-c4b8-4f17-bade-639ad82d6a03 service nova] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Refreshing network info cache for port db91e8e4-4ec7-42cf-a81e-46717ae18c5b {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 755.893101] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0cbd2765-a08f-42ec-ae95-722fb8bb41df {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.901924] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0666b08e-587c-4d8a-943f-44492f14bad1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.923084] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7831bfb8-b336-4338-923f-c759a5c67c06 could not be found. [ 755.923287] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 755.923460] env[62923]: INFO nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Took 0.03 seconds to destroy the instance on the hypervisor. [ 755.923685] env[62923]: DEBUG oslo.service.loopingcall [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 755.923893] env[62923]: DEBUG nova.compute.manager [-] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 755.924009] env[62923]: DEBUG nova.network.neutron [-] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 755.937731] env[62923]: DEBUG nova.network.neutron [-] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.020434] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 7c98c50a-e7c7-4430-b5c6-dec88a78c397 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 756.410979] env[62923]: DEBUG nova.network.neutron [req-779bda46-3e29-4b00-9dd3-6d871c0e8b25 req-4d7f054e-c4b8-4f17-bade-639ad82d6a03 service nova] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.440385] env[62923]: DEBUG nova.network.neutron [-] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.500157] env[62923]: DEBUG nova.network.neutron [req-779bda46-3e29-4b00-9dd3-6d871c0e8b25 req-4d7f054e-c4b8-4f17-bade-639ad82d6a03 service nova] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.523514] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance db26908c-6aa6-47b8-a3c4-461247e36d85 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 756.943065] env[62923]: INFO nova.compute.manager [-] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Took 1.02 seconds to deallocate network for instance. 
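The lockutils records in this run repeat one pattern per instance: acquire "refresh_cache-<uuid>", refresh the Neutron info cache, release. A hedged sketch of that serialization using oslo.concurrency's real lockutils.lock() context manager (assuming the library is installed); the module-level dict and the fetch stub are stand-ins for Nova's cache and the Neutron round trip, which here comes back empty for the deleted port, matching "Updating instance_info_cache with network_info: []":

```python
# Per-instance lock around a shared network-info cache, mirroring the
# Acquiring/Acquired/Releasing "refresh_cache-<uuid>" records above.
from oslo_concurrency import lockutils

_nw_info_cache = {}


def fetch_network_info(instance_uuid):
    # Stand-in for the Neutron port listing; a deleted port yields [].
    return []


def refresh_instance_cache(instance_uuid):
    # lockutils.lock() is a context manager; acquisition and release are
    # what the DEBUG records trace, including time waited and held.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        _nw_info_cache[instance_uuid] = fetch_network_info(instance_uuid)
    return _nw_info_cache[instance_uuid]


print(refresh_instance_cache("7831bfb8-b336-4338-923f-c759a5c67c06"))
```

Run concurrently for the same UUID, the second caller blocks until the first releases, which is exactly the waited/held accounting the lock records report.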
[ 756.946711] env[62923]: DEBUG nova.compute.claims [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 756.946866] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.003611] env[62923]: DEBUG oslo_concurrency.lockutils [req-779bda46-3e29-4b00-9dd3-6d871c0e8b25 req-4d7f054e-c4b8-4f17-bade-639ad82d6a03 service nova] Releasing lock "refresh_cache-7831bfb8-b336-4338-923f-c759a5c67c06" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.003873] env[62923]: DEBUG nova.compute.manager [req-779bda46-3e29-4b00-9dd3-6d871c0e8b25 req-4d7f054e-c4b8-4f17-bade-639ad82d6a03 service nova] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Received event network-vif-deleted-db91e8e4-4ec7-42cf-a81e-46717ae18c5b {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 757.026834] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 83ead303-c5b9-4600-935b-fa1a77689dcf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.529952] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance c2e48555-68b5-4ed0-8ad6-a87833538df8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.033170] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 8a369d56-8f85-4d04-ac6b-bf2eced7098f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.536548] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 08d39755-f94c-45aa-bfb5-f179e8a370db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.040046] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 98974fb7-049a-4c72-a352-bc0a50d2a879 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 759.542982] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 81cca322-c1a0-4fbd-8013-0e4a4694ecfd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 760.046085] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 6fa4d8a8-093f-4ae8-9148-f15f5bf98944 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 760.549039] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.052389] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 6cf594e3-e4a6-45f5-b8d2-06db1c200042 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 761.052713] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 761.052801] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 761.289953] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b302a246-4e6d-468a-9e96-aab58a3554f5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.297563] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23572e0-aa18-4967-8a5e-adae0fc8c8f4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.327811] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea083e8e-8f16-405e-978e-7631b3af92ba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.334899] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22d45ed-8469-4c81-ab72-f2c36a4cd888 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.347496] env[62923]: DEBUG nova.compute.provider_tree [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.850640] env[62923]: DEBUG nova.scheduler.client.report [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 762.355720] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 762.355962] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.903s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.356259] env[62923]: DEBUG oslo_concurrency.lockutils [None 
req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.647s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.103310] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11a8c1f-e2f4-4652-9102-8e185ac086de {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.111087] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612c7217-4e09-411e-a66c-dc7d7ebc02ee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.140793] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6e2795-d841-45ac-8483-b21bc1c8630e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.148471] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db5e1a0-f5e1-45f9-8bd6-cb372d06898a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.161800] env[62923]: DEBUG nova.compute.provider_tree [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.665690] env[62923]: DEBUG nova.scheduler.client.report [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 764.171066] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.815s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.171791] env[62923]: ERROR nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b6985535-654b-4dcd-bc76-0ae48fb21c12, please check neutron logs for more information. 
[ 764.171791] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Traceback (most recent call last): [ 764.171791] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 764.171791] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] self.driver.spawn(context, instance, image_meta, [ 764.171791] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 764.171791] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] self._vmops.spawn(context, instance, image_meta, injected_files, [ 764.171791] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 764.171791] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] vm_ref = self.build_virtual_machine(instance, [ 764.171791] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 764.171791] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] vif_infos = vmwarevif.get_vif_info(self._session, [ 764.171791] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 764.172252] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] for vif in network_info: [ 764.172252] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 764.172252] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] return self._sync_wrapper(fn, *args, **kwargs) [ 764.172252] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 764.172252] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] self.wait() [ 764.172252] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 764.172252] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] self[:] = self._gt.wait() [ 764.172252] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 764.172252] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] return self._exit_event.wait() [ 764.172252] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 764.172252] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] result = hub.switch() [ 764.172252] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
764.172252] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] return self.greenlet.switch() [ 764.172683] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 764.172683] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] result = function(*args, **kwargs) [ 764.172683] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 764.172683] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] return func(*args, **kwargs) [ 764.172683] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 764.172683] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] raise e [ 764.172683] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 764.172683] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] nwinfo = self.network_api.allocate_for_instance( [ 764.172683] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 764.172683] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] created_port_ids = self._update_ports_for_instance( [ 764.172683] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 764.172683] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] with excutils.save_and_reraise_exception(): [ 764.172683] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 764.173109] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] self.force_reraise() [ 764.173109] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 764.173109] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] raise self.value [ 764.173109] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 764.173109] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] updated_port = self._update_port( [ 764.173109] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 764.173109] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] _ensure_no_port_binding_failure(port) [ 764.173109] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 764.173109] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] raise exception.PortBindingFailed(port_id=port['id']) [ 764.173109] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] nova.exception.PortBindingFailed: Binding failed for port b6985535-654b-4dcd-bc76-0ae48fb21c12, please check neutron logs for more information. [ 764.173109] env[62923]: ERROR nova.compute.manager [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] [ 764.173467] env[62923]: DEBUG nova.compute.utils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Binding failed for port b6985535-654b-4dcd-bc76-0ae48fb21c12, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 764.173982] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.892s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.178026] env[62923]: DEBUG nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Build of instance a701b2b9-10df-4ba3-8b78-b6b486d8f1db was re-scheduled: Binding failed for port b6985535-654b-4dcd-bc76-0ae48fb21c12, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 764.178269] env[62923]: DEBUG nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 764.178514] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "refresh_cache-a701b2b9-10df-4ba3-8b78-b6b486d8f1db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.178662] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "refresh_cache-a701b2b9-10df-4ba3-8b78-b6b486d8f1db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.178818] env[62923]: DEBUG nova.network.neutron [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 764.699767] env[62923]: DEBUG nova.network.neutron [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.818368] env[62923]: DEBUG nova.network.neutron [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.977426] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbcc999-a21a-4bc7-9565-70e9e5b74e21 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.985779] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5fbe04-a14f-4e22-90f0-d39a23b5c17e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.016207] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcd4200-cb46-4086-a7b0-01d8bd44ffff {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.024566] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40c0dc7-97da-4962-a391-e2017400e0a7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.038968] env[62923]: DEBUG nova.compute.provider_tree [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.321222] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "refresh_cache-a701b2b9-10df-4ba3-8b78-b6b486d8f1db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.321501] env[62923]: DEBUG nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 765.321631] env[62923]: DEBUG nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 765.321825] env[62923]: DEBUG nova.network.neutron [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.336312] env[62923]: DEBUG nova.network.neutron [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.542110] env[62923]: DEBUG nova.scheduler.client.report [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 765.839437] env[62923]: DEBUG nova.network.neutron [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.046861] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.873s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.047512] env[62923]: ERROR nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5866fb2d-9382-4b20-9cf1-def904197df6, please check neutron logs for more information. 
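The inventory dict repeated in these scheduler.client.report entries is what the resource tracker pushes to placement for provider a513b783-544c-421b-85ec-cfd6d6ee698d. Schedulable capacity per resource class follows the usual placement formula (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may request (16 vCPUs, 65530 MB, 148 GB here). A quick check with the logged numbers:

# Numbers copied from the inventory dump above; the capacity formula is the
# one placement applies when deciding how much of a resource is allocatable.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")

# VCPU: 192        (48 cores oversubscribed 4x)
# MEMORY_MB: 196078
# DISK_GB: 400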
[ 766.047512] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Traceback (most recent call last): [ 766.047512] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 766.047512] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] self.driver.spawn(context, instance, image_meta, [ 766.047512] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 766.047512] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 766.047512] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 766.047512] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] vm_ref = self.build_virtual_machine(instance, [ 766.047512] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 766.047512] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] vif_infos = vmwarevif.get_vif_info(self._session, [ 766.047512] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 766.047916] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] for vif in network_info: [ 766.047916] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 766.047916] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] return self._sync_wrapper(fn, *args, **kwargs) [ 766.047916] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 766.047916] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] self.wait() [ 766.047916] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 766.047916] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] self[:] = self._gt.wait() [ 766.047916] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 766.047916] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] return self._exit_event.wait() [ 766.047916] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 766.047916] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] result = hub.switch() [ 766.047916] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
766.047916] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] return self.greenlet.switch() [ 766.048290] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 766.048290] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] result = function(*args, **kwargs) [ 766.048290] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 766.048290] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] return func(*args, **kwargs) [ 766.048290] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 766.048290] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] raise e [ 766.048290] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 766.048290] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] nwinfo = self.network_api.allocate_for_instance( [ 766.048290] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 766.048290] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] created_port_ids = self._update_ports_for_instance( [ 766.048290] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 766.048290] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] with excutils.save_and_reraise_exception(): [ 766.048290] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 766.048811] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] self.force_reraise() [ 766.048811] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 766.048811] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] raise self.value [ 766.048811] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 766.048811] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] updated_port = self._update_port( [ 766.048811] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 766.048811] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] _ensure_no_port_binding_failure(port) [ 766.048811] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 766.048811] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] raise exception.PortBindingFailed(port_id=port['id']) [ 766.048811] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] nova.exception.PortBindingFailed: Binding failed for port 5866fb2d-9382-4b20-9cf1-def904197df6, please check neutron logs for more information. [ 766.048811] env[62923]: ERROR nova.compute.manager [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] [ 766.049127] env[62923]: DEBUG nova.compute.utils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Binding failed for port 5866fb2d-9382-4b20-9cf1-def904197df6, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 766.049575] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.568s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.050986] env[62923]: INFO nova.compute.claims [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 766.054558] env[62923]: DEBUG nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Build of instance fcee63c5-eaa3-4d8c-a612-9c30087433e1 was re-scheduled: Binding failed for port 5866fb2d-9382-4b20-9cf1-def904197df6, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 766.055041] env[62923]: DEBUG nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 766.055255] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Acquiring lock "refresh_cache-fcee63c5-eaa3-4d8c-a612-9c30087433e1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.055402] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Acquired lock "refresh_cache-fcee63c5-eaa3-4d8c-a612-9c30087433e1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.055554] env[62923]: DEBUG nova.network.neutron [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 766.342636] env[62923]: INFO nova.compute.manager [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a701b2b9-10df-4ba3-8b78-b6b486d8f1db] Took 1.02 seconds to deallocate network for instance. [ 766.589193] env[62923]: DEBUG nova.network.neutron [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 766.692328] env[62923]: DEBUG nova.network.neutron [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.195766] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Releasing lock "refresh_cache-fcee63c5-eaa3-4d8c-a612-9c30087433e1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.196051] env[62923]: DEBUG nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 767.196251] env[62923]: DEBUG nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 767.196419] env[62923]: DEBUG nova.network.neutron [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 767.232685] env[62923]: DEBUG nova.network.neutron [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.331359] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926d9182-3f43-4b2b-92c3-4edc6c8530f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.339929] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a313329-2c31-4f0e-ad98-77907c73c579 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.375393] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e556bb9-8450-4b09-8286-981e768524b5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.382848] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea8caa5-6da8-4c86-a4ce-32854020f06e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.396727] env[62923]: DEBUG nova.compute.provider_tree [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.398832] env[62923]: INFO nova.scheduler.client.report [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleted allocations for instance a701b2b9-10df-4ba3-8b78-b6b486d8f1db [ 767.407387] env[62923]: DEBUG nova.scheduler.client.report [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 767.735205] env[62923]: DEBUG nova.network.neutron [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.912859] env[62923]: DEBUG oslo_concurrency.lockutils [None req-419e16ed-5e76-47b4-87e3-51f493e5e66d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "a701b2b9-10df-4ba3-8b78-b6b486d8f1db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.878s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.913575] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.864s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.914053] env[62923]: DEBUG nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 767.917780] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.590s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.918687] env[62923]: INFO nova.compute.claims [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.238240] env[62923]: INFO nova.compute.manager [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] [instance: fcee63c5-eaa3-4d8c-a612-9c30087433e1] Took 1.04 seconds to deallocate network for instance. [ 768.421631] env[62923]: DEBUG nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 768.429566] env[62923]: DEBUG nova.compute.utils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 768.435700] env[62923]: DEBUG nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 768.435866] env[62923]: DEBUG nova.network.neutron [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 768.518921] env[62923]: DEBUG nova.policy [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b24f096a24d047519ca27c2b360ed269', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6b2113ade30448d2be99700eeeeda3f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 768.912061] env[62923]: DEBUG nova.network.neutron [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Successfully created port: 165fb841-918f-4749-b4f0-ea003000049f {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 768.941375] env[62923]: DEBUG nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 768.948376] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.225082] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4715dd4b-7ac5-4c67-abc9-9c1db66adb9f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.233257] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b464b830-33f7-49cb-899d-e5cd8035e12d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.267626] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfcc0374-ebf0-49d2-a331-aea04c08afd3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.275266] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94fbc2b9-0d7d-4d84-ba35-2c0232f4d8bd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.289248] env[62923]: DEBUG nova.compute.provider_tree [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.291207] env[62923]: INFO nova.scheduler.client.report [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Deleted allocations for instance fcee63c5-eaa3-4d8c-a612-9c30087433e1 [ 769.362690] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "a616c7f0-8c39-4c08-a1a4-1d89e158d3c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.362929] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "a616c7f0-8c39-4c08-a1a4-1d89e158d3c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.712020] env[62923]: DEBUG nova.compute.manager [req-4e531a03-4f55-4c54-b039-0c254b8a93c8 req-4f63cbcd-5605-4da0-b8fc-52e7cec65d35 service nova] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Received event network-changed-165fb841-918f-4749-b4f0-ea003000049f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 769.712276] env[62923]: DEBUG 
nova.compute.manager [req-4e531a03-4f55-4c54-b039-0c254b8a93c8 req-4f63cbcd-5605-4da0-b8fc-52e7cec65d35 service nova] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Refreshing instance network info cache due to event network-changed-165fb841-918f-4749-b4f0-ea003000049f. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 769.712481] env[62923]: DEBUG oslo_concurrency.lockutils [req-4e531a03-4f55-4c54-b039-0c254b8a93c8 req-4f63cbcd-5605-4da0-b8fc-52e7cec65d35 service nova] Acquiring lock "refresh_cache-c22aa745-0e4a-40fd-903f-edba79cbf88b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.712826] env[62923]: DEBUG oslo_concurrency.lockutils [req-4e531a03-4f55-4c54-b039-0c254b8a93c8 req-4f63cbcd-5605-4da0-b8fc-52e7cec65d35 service nova] Acquired lock "refresh_cache-c22aa745-0e4a-40fd-903f-edba79cbf88b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.712826] env[62923]: DEBUG nova.network.neutron [req-4e531a03-4f55-4c54-b039-0c254b8a93c8 req-4f63cbcd-5605-4da0-b8fc-52e7cec65d35 service nova] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Refreshing network info cache for port 165fb841-918f-4749-b4f0-ea003000049f {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 769.800019] env[62923]: DEBUG nova.scheduler.client.report [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 769.801112] env[62923]: DEBUG oslo_concurrency.lockutils [None req-da2f1280-54c5-4936-96c4-e24a74c50f84 tempest-InstanceActionsTestJSON-1075584289 tempest-InstanceActionsTestJSON-1075584289-project-member] Lock "fcee63c5-eaa3-4d8c-a612-9c30087433e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 155.062s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.954098] env[62923]: DEBUG nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 769.977387] env[62923]: DEBUG nova.virt.hardware [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 769.977621] env[62923]: DEBUG nova.virt.hardware [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 769.977769] env[62923]: DEBUG nova.virt.hardware [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 769.977940] env[62923]: DEBUG nova.virt.hardware [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 769.978095] env[62923]: DEBUG nova.virt.hardware [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 769.978238] env[62923]: DEBUG nova.virt.hardware [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 769.978432] env[62923]: DEBUG nova.virt.hardware [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 769.978588] env[62923]: DEBUG nova.virt.hardware [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 769.978746] 
env[62923]: DEBUG nova.virt.hardware [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 769.978923] env[62923]: DEBUG nova.virt.hardware [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 769.979858] env[62923]: DEBUG nova.virt.hardware [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 769.979988] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0347ea-683b-4124-a05b-dec0f3448382 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.983439] env[62923]: ERROR nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 165fb841-918f-4749-b4f0-ea003000049f, please check neutron logs for more information. [ 769.983439] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 769.983439] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 769.983439] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 769.983439] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 769.983439] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 769.983439] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 769.983439] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 769.983439] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.983439] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 769.983439] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.983439] env[62923]: ERROR nova.compute.manager raise self.value [ 769.983439] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 769.983439] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 769.983439] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.983439] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 769.984023] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure 
[ 769.984023] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 769.984023] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 165fb841-918f-4749-b4f0-ea003000049f, please check neutron logs for more information. [ 769.984023] env[62923]: ERROR nova.compute.manager [ 769.984023] env[62923]: Traceback (most recent call last): [ 769.984023] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 769.984023] env[62923]: listener.cb(fileno) [ 769.984023] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 769.984023] env[62923]: result = function(*args, **kwargs) [ 769.984023] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 769.984023] env[62923]: return func(*args, **kwargs) [ 769.984023] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 769.984023] env[62923]: raise e [ 769.984023] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 769.984023] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 769.984023] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 769.984023] env[62923]: created_port_ids = self._update_ports_for_instance( [ 769.984023] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 769.984023] env[62923]: with excutils.save_and_reraise_exception(): [ 769.984023] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.984023] env[62923]: self.force_reraise() [ 769.984023] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.984023] env[62923]: raise self.value [ 769.984023] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 769.984023] env[62923]: updated_port = self._update_port( [ 769.984023] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.984023] env[62923]: _ensure_no_port_binding_failure(port) [ 769.984023] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 769.984023] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 769.985019] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 165fb841-918f-4749-b4f0-ea003000049f, please check neutron logs for more information. [ 769.985019] env[62923]: Removing descriptor: 21 [ 769.989081] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866a791f-82d0-473e-91ae-9780f66a02a6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.002115] env[62923]: ERROR nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 165fb841-918f-4749-b4f0-ea003000049f, please check neutron logs for more information. 
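Note that the same PortBindingFailed for port 165fb841-918f-4749-b4f0-ea003000049f surfaces twice around here: once as the raw traceback just above, printed from inside the eventlet hub (poll.py listener.cb, ending in "Removing descriptor: 21") where the network allocation greenthread actually ran, and once more below when vmops iterates network_info and the stored exception is re-raised through model.py's _sync_wrapper/wait. A stripped-down sketch of that deferred-failure pattern, not Nova's actual NetworkInfoAsyncWrapper:

import eventlet


def allocate_network():
    # Stand-in for the network allocation that runs in its own greenthread.
    raise RuntimeError(
        "Binding failed for port 165fb841-918f-4749-b4f0-ea003000049f")


class AsyncNetworkInfo:
    """Defers the greenthread's result until something iterates it."""

    def __init__(self):
        self._gt = eventlet.spawn(allocate_network)

    def __iter__(self):
        # wait() re-raises whatever the greenthread raised; this is the
        # model.py _sync_wrapper -> wait() step in the tracebacks above.
        return iter(self._gt.wait())


try:
    for vif in AsyncNetworkInfo():
        pass
except RuntimeError as exc:
    print(f"re-raised at first iteration: {exc}")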
[ 770.002115] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Traceback (most recent call last): [ 770.002115] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 770.002115] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] yield resources [ 770.002115] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 770.002115] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] self.driver.spawn(context, instance, image_meta, [ 770.002115] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 770.002115] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 770.002115] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 770.002115] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] vm_ref = self.build_virtual_machine(instance, [ 770.002115] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 770.002535] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] vif_infos = vmwarevif.get_vif_info(self._session, [ 770.002535] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 770.002535] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] for vif in network_info: [ 770.002535] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 770.002535] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] return self._sync_wrapper(fn, *args, **kwargs) [ 770.002535] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 770.002535] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] self.wait() [ 770.002535] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 770.002535] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] self[:] = self._gt.wait() [ 770.002535] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 770.002535] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] return self._exit_event.wait() [ 770.002535] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 770.002535] env[62923]: ERROR 
nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] current.throw(*self._exc) [ 770.002947] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 770.002947] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] result = function(*args, **kwargs) [ 770.002947] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 770.002947] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] return func(*args, **kwargs) [ 770.002947] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 770.002947] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] raise e [ 770.002947] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 770.002947] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] nwinfo = self.network_api.allocate_for_instance( [ 770.002947] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 770.002947] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] created_port_ids = self._update_ports_for_instance( [ 770.002947] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 770.002947] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] with excutils.save_and_reraise_exception(): [ 770.002947] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.003383] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] self.force_reraise() [ 770.003383] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.003383] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] raise self.value [ 770.003383] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 770.003383] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] updated_port = self._update_port( [ 770.003383] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.003383] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] _ensure_no_port_binding_failure(port) [ 770.003383] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
770.003383] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] raise exception.PortBindingFailed(port_id=port['id']) [ 770.003383] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] nova.exception.PortBindingFailed: Binding failed for port 165fb841-918f-4749-b4f0-ea003000049f, please check neutron logs for more information. [ 770.003383] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] [ 770.003383] env[62923]: INFO nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Terminating instance [ 770.004172] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "refresh_cache-c22aa745-0e4a-40fd-903f-edba79cbf88b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.227758] env[62923]: DEBUG nova.network.neutron [req-4e531a03-4f55-4c54-b039-0c254b8a93c8 req-4f63cbcd-5605-4da0-b8fc-52e7cec65d35 service nova] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.304139] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.387s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.304687] env[62923]: DEBUG nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 770.307255] env[62923]: DEBUG nova.compute.manager [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 770.309971] env[62923]: DEBUG nova.network.neutron [req-4e531a03-4f55-4c54-b039-0c254b8a93c8 req-4f63cbcd-5605-4da0-b8fc-52e7cec65d35 service nova] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.310977] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.981s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.813195] env[62923]: DEBUG nova.compute.utils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 770.815793] env[62923]: DEBUG nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 770.815793] env[62923]: DEBUG nova.network.neutron [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 770.817031] env[62923]: DEBUG oslo_concurrency.lockutils [req-4e531a03-4f55-4c54-b039-0c254b8a93c8 req-4f63cbcd-5605-4da0-b8fc-52e7cec65d35 service nova] Releasing lock "refresh_cache-c22aa745-0e4a-40fd-903f-edba79cbf88b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.821326] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquired lock "refresh_cache-c22aa745-0e4a-40fd-903f-edba79cbf88b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.821492] env[62923]: DEBUG nova.network.neutron [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 770.843258] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.915961] env[62923]: DEBUG nova.policy [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 
tempest-ListServerFiltersTestJSON-718682557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b24f096a24d047519ca27c2b360ed269', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6b2113ade30448d2be99700eeeeda3f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 771.113017] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e53b221-8297-4a26-9bbe-b963f157a9c3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.119047] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c7eff0-6205-427a-be07-857f02f74b5f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.151402] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67aac8a1-e77d-4a8f-ba0c-f680ffcecd3f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.159215] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575aae82-56e6-4189-9dd4-4efd881c4a15 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.175138] env[62923]: DEBUG nova.compute.provider_tree [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.322604] env[62923]: DEBUG nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 771.357130] env[62923]: DEBUG nova.network.neutron [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.456130] env[62923]: DEBUG nova.network.neutron [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Successfully created port: 4d97090d-1208-4f22-87ac-2148a63b69d1 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 771.682202] env[62923]: DEBUG nova.scheduler.client.report [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 771.717882] env[62923]: DEBUG nova.network.neutron [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.841480] env[62923]: DEBUG nova.compute.manager [req-621aebc6-e460-4d87-8b18-253e1795853b req-01cfa793-3c55-4853-b0f9-a42c6cdf4132 service nova] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Received event network-vif-deleted-165fb841-918f-4749-b4f0-ea003000049f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 772.188020] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.877s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.188918] env[62923]: ERROR nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e410cdcf-56e9-4a0b-b47c-c855b5d11d1e, please check neutron logs for more information. 
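The traceback above for instance c22aa745-0e4a-40fd-903f-edba79cbf88b (and the one for 9b334c72-11f5-4165-a350-09fe5487a9a0 that follows) shows the key mechanics of this failure mode: PortBindingFailed is raised on the greenthread running _allocate_network_async, but it only surfaces later, when vif.py iterates network_info and the model's _sync_wrapper calls wait(), which re-raises the stored exception (current.throw(*self._exc)). A minimal pure-Python analogy of that deferred-raise pattern, using concurrent.futures rather than eventlet (the class and function names here are illustrative, not Nova's):

    from concurrent.futures import ThreadPoolExecutor

    class PortBindingFailed(Exception):
        pass

    def allocate_network_async():
        # Stands in for ComputeManager._allocate_network_async failing
        # on its background worker.
        raise PortBindingFailed("Binding failed for port ...")

    class LazyNetworkInfo:
        """Analogy for the async network_info wrapper: the allocation
        error is stored and only re-raised when the result is consumed."""
        def __init__(self, pool):
            self._future = pool.submit(allocate_network_async)

        def __iter__(self):
            # Like _sync_wrapper() -> wait(): the stored exception is
            # re-raised here, inside the driver's "for vif in network_info".
            return iter(self._future.result())

    with ThreadPoolExecutor() as pool:
        network_info = LazyNetworkInfo(pool)   # no error visible yet
        try:
            for vif in network_info:           # the error surfaces only now
                pass
        except PortBindingFailed as exc:
            print(f"spawn fails late: {exc}")

This is why the spawn path (vmops.build_virtual_machine) sits at the top of the traceback even though the root cause is in the Neutron allocation.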
[ 772.188918] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Traceback (most recent call last): [ 772.188918] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 772.188918] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] self.driver.spawn(context, instance, image_meta, [ 772.188918] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 772.188918] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 772.188918] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 772.188918] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] vm_ref = self.build_virtual_machine(instance, [ 772.188918] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 772.188918] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] vif_infos = vmwarevif.get_vif_info(self._session, [ 772.188918] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 772.189495] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] for vif in network_info: [ 772.189495] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 772.189495] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] return self._sync_wrapper(fn, *args, **kwargs) [ 772.189495] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 772.189495] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] self.wait() [ 772.189495] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 772.189495] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] self[:] = self._gt.wait() [ 772.189495] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 772.189495] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] return self._exit_event.wait() [ 772.189495] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 772.189495] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] current.throw(*self._exc) [ 772.189495] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
772.189495] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] result = function(*args, **kwargs) [ 772.190018] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 772.190018] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] return func(*args, **kwargs) [ 772.190018] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 772.190018] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] raise e [ 772.190018] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 772.190018] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] nwinfo = self.network_api.allocate_for_instance( [ 772.190018] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 772.190018] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] created_port_ids = self._update_ports_for_instance( [ 772.190018] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 772.190018] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] with excutils.save_and_reraise_exception(): [ 772.190018] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 772.190018] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] self.force_reraise() [ 772.190018] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 772.190506] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] raise self.value [ 772.190506] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 772.190506] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] updated_port = self._update_port( [ 772.190506] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 772.190506] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] _ensure_no_port_binding_failure(port) [ 772.190506] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 772.190506] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] raise exception.PortBindingFailed(port_id=port['id']) [ 772.190506] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] nova.exception.PortBindingFailed: Binding failed for 
port e410cdcf-56e9-4a0b-b47c-c855b5d11d1e, please check neutron logs for more information. [ 772.190506] env[62923]: ERROR nova.compute.manager [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] [ 772.190506] env[62923]: DEBUG nova.compute.utils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Binding failed for port e410cdcf-56e9-4a0b-b47c-c855b5d11d1e, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 772.191751] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.600s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.193549] env[62923]: INFO nova.compute.claims [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 772.196270] env[62923]: DEBUG nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Build of instance 9b334c72-11f5-4165-a350-09fe5487a9a0 was re-scheduled: Binding failed for port e410cdcf-56e9-4a0b-b47c-c855b5d11d1e, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 772.196776] env[62923]: DEBUG nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 772.197044] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "refresh_cache-9b334c72-11f5-4165-a350-09fe5487a9a0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.197229] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "refresh_cache-9b334c72-11f5-4165-a350-09fe5487a9a0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.197416] env[62923]: DEBUG nova.network.neutron [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 772.220726] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Releasing lock "refresh_cache-c22aa745-0e4a-40fd-903f-edba79cbf88b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.221155] env[62923]: DEBUG nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 772.221359] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 772.221634] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-835d018a-7fb3-4af0-8cea-b33c0f9333fc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.231790] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906766b2-c550-4208-a5a7-092ecbc4f734 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.254133] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c22aa745-0e4a-40fd-903f-edba79cbf88b could not be found. [ 772.254406] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 772.254636] env[62923]: INFO nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 772.254846] env[62923]: DEBUG oslo.service.loopingcall [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 772.255269] env[62923]: DEBUG nova.compute.manager [-] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 772.255269] env[62923]: DEBUG nova.network.neutron [-] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 772.287070] env[62923]: DEBUG nova.network.neutron [-] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.333862] env[62923]: DEBUG nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 772.359986] env[62923]: DEBUG nova.virt.hardware [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 772.360249] env[62923]: DEBUG nova.virt.hardware [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 772.360400] env[62923]: DEBUG nova.virt.hardware [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 772.360574] env[62923]: DEBUG nova.virt.hardware [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 772.360713] env[62923]: DEBUG nova.virt.hardware [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 772.360855] env[62923]: DEBUG nova.virt.hardware [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 772.361078] env[62923]: DEBUG nova.virt.hardware [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 772.361211] env[62923]: DEBUG nova.virt.hardware [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 772.361656] 
env[62923]: DEBUG nova.virt.hardware [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 772.361656] env[62923]: DEBUG nova.virt.hardware [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 772.361856] env[62923]: DEBUG nova.virt.hardware [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 772.362825] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e559f1-2460-468d-a483-77ce962664ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.371038] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e86460-9283-44e0-980a-f7cc5d853e90 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.727482] env[62923]: DEBUG nova.network.neutron [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.793157] env[62923]: DEBUG nova.network.neutron [-] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.901183] env[62923]: DEBUG nova.network.neutron [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.940953] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 772.941069] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Starting heal instance info cache {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 772.941267] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Rebuilding the list of instances to heal {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 772.985753] env[62923]: ERROR nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4d97090d-1208-4f22-87ac-2148a63b69d1, please check neutron logs for more information. 
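The "failed network setup after 1 attempt(s)" wording above comes from a bounded retry loop around the Neutron allocation; the attempt budget is the nova.conf option network_allocate_retries plus one, so zero retries means a single attempt. A hedged sketch of such a loop, not Nova's exact code:

    import time

    NETWORK_ALLOCATE_RETRIES = 0  # nova.conf [DEFAULT] network_allocate_retries

    def allocate_with_retries(allocate):
        attempts = NETWORK_ALLOCATE_RETRIES + 1
        for attempt in range(1, attempts + 1):
            try:
                return allocate()
            except Exception as exc:
                if attempt == attempts:
                    print("Instance failed network setup "
                          f"after {attempt} attempt(s): {exc}")
                    raise
                time.sleep(attempt)  # back off a little between attempts

Since PortBindingFailed is raised on the final (here, only) attempt, it propagates out and the build is aborted, as the traceback that follows shows.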
[ 772.985753] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 772.985753] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 772.985753] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 772.985753] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 772.985753] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 772.985753] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 772.985753] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 772.985753] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 772.985753] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 772.985753] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 772.985753] env[62923]: ERROR nova.compute.manager raise self.value [ 772.985753] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 772.985753] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 772.985753] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 772.985753] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 772.986253] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 772.986253] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 772.986253] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4d97090d-1208-4f22-87ac-2148a63b69d1, please check neutron logs for more information. 
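The last three frames of this traceback recur in every failure in this log: _update_port wraps the Neutron call in oslo_utils.excutils.save_and_reraise_exception(), and _ensure_no_port_binding_failure converts a port whose binding the Neutron backend rejected into PortBindingFailed. A sketch of that shape, reconstructed from the frames shown here rather than copied from Nova (the 'binding_failed' constant value is an assumption):

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, please "
                             "check neutron logs for more information.")

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed constant value

    def _ensure_no_port_binding_failure(port):
        # Neutron signals a failed binding through the port's binding:vif_type.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    def update_ports_for_instance(ports, update_port):
        created = []
        for port in ports:
            try:
                updated = update_port(port)
                _ensure_no_port_binding_failure(updated)
                created.append(updated['id'])
            except Exception:
                # Runs cleanup while preserving the original exception,
                # then re-raises it on exit (the force_reraise()/raise
                # self.value frames above).
                with excutils.save_and_reraise_exception():
                    pass  # undo already-created ports here
        return created

Note that the port update itself succeeds at the HTTP level; the failure is Neutron reporting that nothing could bind the port on this host, which is why the message directs the reader to the Neutron logs.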
[ 772.986253] env[62923]: ERROR nova.compute.manager [ 772.986253] env[62923]: Traceback (most recent call last): [ 772.986253] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 772.986253] env[62923]: listener.cb(fileno) [ 772.986253] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 772.986253] env[62923]: result = function(*args, **kwargs) [ 772.986253] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 772.986253] env[62923]: return func(*args, **kwargs) [ 772.986253] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 772.986253] env[62923]: raise e [ 772.986253] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 772.986253] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 772.986253] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 772.986253] env[62923]: created_port_ids = self._update_ports_for_instance( [ 772.986253] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 772.986253] env[62923]: with excutils.save_and_reraise_exception(): [ 772.986253] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 772.986253] env[62923]: self.force_reraise() [ 772.986253] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 772.986253] env[62923]: raise self.value [ 772.986253] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 772.986253] env[62923]: updated_port = self._update_port( [ 772.986253] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 772.986253] env[62923]: _ensure_no_port_binding_failure(port) [ 772.986253] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 772.986253] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 772.987245] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 4d97090d-1208-4f22-87ac-2148a63b69d1, please check neutron logs for more information. [ 772.987245] env[62923]: Removing descriptor: 21 [ 772.987245] env[62923]: ERROR nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4d97090d-1208-4f22-87ac-2148a63b69d1, please check neutron logs for more information. 
[ 772.987245] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Traceback (most recent call last): [ 772.987245] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 772.987245] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] yield resources [ 772.987245] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 772.987245] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] self.driver.spawn(context, instance, image_meta, [ 772.987245] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 772.987245] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] self._vmops.spawn(context, instance, image_meta, injected_files, [ 772.987245] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 772.987245] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] vm_ref = self.build_virtual_machine(instance, [ 772.987638] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 772.987638] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] vif_infos = vmwarevif.get_vif_info(self._session, [ 772.987638] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 772.987638] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] for vif in network_info: [ 772.987638] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 772.987638] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] return self._sync_wrapper(fn, *args, **kwargs) [ 772.987638] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 772.987638] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] self.wait() [ 772.987638] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 772.987638] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] self[:] = self._gt.wait() [ 772.987638] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 772.987638] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] return self._exit_event.wait() [ 772.987638] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 772.988106] env[62923]: ERROR 
nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] result = hub.switch() [ 772.988106] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 772.988106] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] return self.greenlet.switch() [ 772.988106] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 772.988106] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] result = function(*args, **kwargs) [ 772.988106] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 772.988106] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] return func(*args, **kwargs) [ 772.988106] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 772.988106] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] raise e [ 772.988106] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 772.988106] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] nwinfo = self.network_api.allocate_for_instance( [ 772.988106] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 772.988106] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] created_port_ids = self._update_ports_for_instance( [ 772.988668] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 772.988668] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] with excutils.save_and_reraise_exception(): [ 772.988668] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 772.988668] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] self.force_reraise() [ 772.988668] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 772.988668] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] raise self.value [ 772.988668] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 772.988668] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] updated_port = self._update_port( [ 772.988668] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 772.988668] 
env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] _ensure_no_port_binding_failure(port) [ 772.988668] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 772.988668] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] raise exception.PortBindingFailed(port_id=port['id']) [ 772.989241] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] nova.exception.PortBindingFailed: Binding failed for port 4d97090d-1208-4f22-87ac-2148a63b69d1, please check neutron logs for more information. [ 772.989241] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] [ 772.989241] env[62923]: INFO nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Terminating instance [ 772.989524] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "refresh_cache-71dd8aff-4500-4c91-8a46-2a398fd03560" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.989833] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquired lock "refresh_cache-71dd8aff-4500-4c91-8a46-2a398fd03560" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.989927] env[62923]: DEBUG nova.network.neutron [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 773.294884] env[62923]: INFO nova.compute.manager [-] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Took 1.04 seconds to deallocate network for instance. 
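The Acquiring/Acquired/Releasing lines around "refresh_cache-<uuid>" and "compute_resources" are oslo.concurrency's named in-process locks. The lockutils.py:310/313/331 frames correspond to the lock() context manager, and the :402/:407/:421 "acquired by ... :: waited/held Ns" frames to the synchronized() decorator. Minimal usage of both forms (function names illustrative):

    from oslo_concurrency import lockutils

    # Context-manager form: produces the lock()/Acquired/Releasing lines.
    def refresh_cache(instance_uuid):
        with lockutils.lock(f'refresh_cache-{instance_uuid}'):
            pass  # rebuild the instance's network info cache here

    # Decorator form: produces the "acquired by ... :: waited Ns" lines.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # resource-tracker bookkeeping runs with the lock held

The long waits visible in this log (e.g. "waited 23.981s" on compute_resources) follow from every claim, abort, and resource-tracker update on the host serializing on that single named lock.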
[ 773.297567] env[62923]: DEBUG nova.compute.claims [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 773.297765] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.405243] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "refresh_cache-9b334c72-11f5-4165-a350-09fe5487a9a0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.405505] env[62923]: DEBUG nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 773.405677] env[62923]: DEBUG nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 773.405840] env[62923]: DEBUG nova.network.neutron [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 773.419759] env[62923]: DEBUG nova.network.neutron [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.445934] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 773.445934] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 773.445934] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Skipping network cache update for instance because it is Building. 
{{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 773.445934] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 773.446186] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 773.446186] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 773.446263] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 773.446555] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Didn't find any instances for network info cache update. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 773.448652] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.448840] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Cleaning up deleted instances {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 773.471166] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26497c74-7b85-4b4d-8cd9-6cb7c6c310c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.479657] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ee1e58-add9-4954-9125-b69c27094f46 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.512888] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a8d3aa-d335-403c-bed8-8a0050047869 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.520265] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dbbb3aa-ab35-4432-80ca-cfadefed109d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.533688] env[62923]: DEBUG nova.compute.provider_tree [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.535771] env[62923]: DEBUG nova.network.neutron [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.655558] env[62923]: DEBUG nova.network.neutron [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.907423] env[62923]: DEBUG nova.compute.manager [req-5d191a46-5482-4c08-86ba-b99945324323 req-4e5be82a-b253-48fc-b933-4289db31d9eb service nova] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Received event network-changed-4d97090d-1208-4f22-87ac-2148a63b69d1 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 773.907681] env[62923]: DEBUG nova.compute.manager [req-5d191a46-5482-4c08-86ba-b99945324323 req-4e5be82a-b253-48fc-b933-4289db31d9eb service nova] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Refreshing instance network info cache due to event network-changed-4d97090d-1208-4f22-87ac-2148a63b69d1. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 773.907872] env[62923]: DEBUG oslo_concurrency.lockutils [req-5d191a46-5482-4c08-86ba-b99945324323 req-4e5be82a-b253-48fc-b933-4289db31d9eb service nova] Acquiring lock "refresh_cache-71dd8aff-4500-4c91-8a46-2a398fd03560" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.922123] env[62923]: DEBUG nova.network.neutron [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.955392] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] There are 2 instances to clean {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 773.955715] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 6ca62d1b-9533-4b83-8e8a-7f62a34c90a3] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 774.039046] env[62923]: DEBUG nova.scheduler.client.report [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
774.157775] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Releasing lock "refresh_cache-71dd8aff-4500-4c91-8a46-2a398fd03560" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.158287] env[62923]: DEBUG nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 774.158515] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 774.158866] env[62923]: DEBUG oslo_concurrency.lockutils [req-5d191a46-5482-4c08-86ba-b99945324323 req-4e5be82a-b253-48fc-b933-4289db31d9eb service nova] Acquired lock "refresh_cache-71dd8aff-4500-4c91-8a46-2a398fd03560" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.159048] env[62923]: DEBUG nova.network.neutron [req-5d191a46-5482-4c08-86ba-b99945324323 req-4e5be82a-b253-48fc-b933-4289db31d9eb service nova] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Refreshing network info cache for port 4d97090d-1208-4f22-87ac-2148a63b69d1 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.161180] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8e62a7b-9cee-475e-ad98-bbe30d35458c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.171187] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e236e20b-4ff8-4eed-8178-733c297960db {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.197594] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 71dd8aff-4500-4c91-8a46-2a398fd03560 could not be found. [ 774.198103] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 774.198103] env[62923]: INFO nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Took 0.04 seconds to destroy the instance on the hypervisor. 
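The WARNING followed immediately by "Instance destroyed" above shows the destroy path treating a VM that never reached vCenter as already gone: InstanceNotFound is caught and the teardown continues as a no-op. A minimal, self-contained sketch of that pattern; every name below is an illustrative stand-in, not Nova's actual code:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    class FakeBackend:
        """Stand-in hypervisor inventory, keyed by instance UUID."""
        def __init__(self, vms):
            self._vms = set(vms)

        def delete(self, uuid):
            if uuid not in self._vms:
                raise InstanceNotFound(uuid)
            self._vms.remove(uuid)

    def destroy_instance(backend, uuid, log=print):
        # Best-effort teardown: an instance absent from the backend counts
        # as already destroyed, mirroring the WARNING -> DEBUG pair above.
        try:
            backend.delete(uuid)
        except InstanceNotFound:
            log("WARNING: instance %s does not exist on backend" % uuid)
        log("DEBUG: instance %s destroyed" % uuid)

    destroy_instance(FakeBackend([]), "71dd8aff-4500-4c91-8a46-2a398fd03560")

Here the build failed before spawn, so there was never a VM to unregister; swallowing the lookup failure is what lets the delete path finish in 0.04 seconds.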
[ 774.198254] env[62923]: DEBUG oslo.service.loopingcall [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 774.198442] env[62923]: DEBUG nova.compute.manager [-] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 774.198549] env[62923]: DEBUG nova.network.neutron [-] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 774.213287] env[62923]: DEBUG nova.network.neutron [-] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.425570] env[62923]: INFO nova.compute.manager [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9b334c72-11f5-4165-a350-09fe5487a9a0] Took 1.02 seconds to deallocate network for instance. [ 774.459148] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: a33da17c-bbb2-4307-b4b3-56cec5cb757e] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 774.546322] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.546803] env[62923]: DEBUG nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 774.549294] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.760s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.679147] env[62923]: DEBUG nova.network.neutron [req-5d191a46-5482-4c08-86ba-b99945324323 req-4e5be82a-b253-48fc-b933-4289db31d9eb service nova] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Instance cache missing network info.
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.715657] env[62923]: DEBUG nova.network.neutron [-] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.750647] env[62923]: DEBUG nova.network.neutron [req-5d191a46-5482-4c08-86ba-b99945324323 req-4e5be82a-b253-48fc-b933-4289db31d9eb service nova] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.962854] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 774.963161] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Cleaning up deleted instances with incomplete migration {{(pid=62923) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 775.053990] env[62923]: DEBUG nova.compute.utils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 775.058136] env[62923]: DEBUG nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 775.058290] env[62923]: DEBUG nova.network.neutron [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 775.117198] env[62923]: DEBUG nova.policy [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b24f096a24d047519ca27c2b360ed269', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6b2113ade30448d2be99700eeeeda3f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 775.218917] env[62923]: INFO nova.compute.manager [-] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Took 1.02 seconds to deallocate network for instance. 
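The Acquiring lock / acquired (waited N.NNNs) / "released" (held N.NNNs) triples that recur throughout these records are emitted by oslo.concurrency's lock wrapper (the inner frames at lockutils.py:402, 407 and 421). A short sketch of the same pattern, assuming oslo.concurrency is importable as it is in this venv; the function body is a stand-in for work such as instance_claim:

    import time

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def instance_claim():
        # Time spent inside the lock shows up as the "held N.NNNs" figure;
        # time spent blocked before entry is the "waited N.NNNs" figure.
        time.sleep(0.05)

    instance_claim()

With DEBUG logging enabled for oslo_concurrency, each call should produce the same acquire/release bookkeeping seen in the surrounding records, which is why long waits (23.760s above) are a quick way to spot contention on compute_resources.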
[ 775.224405] env[62923]: DEBUG nova.compute.claims [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 775.224405] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.253225] env[62923]: DEBUG oslo_concurrency.lockutils [req-5d191a46-5482-4c08-86ba-b99945324323 req-4e5be82a-b253-48fc-b933-4289db31d9eb service nova] Releasing lock "refresh_cache-71dd8aff-4500-4c91-8a46-2a398fd03560" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.255112] env[62923]: DEBUG nova.compute.manager [req-5d191a46-5482-4c08-86ba-b99945324323 req-4e5be82a-b253-48fc-b933-4289db31d9eb service nova] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Received event network-vif-deleted-4d97090d-1208-4f22-87ac-2148a63b69d1 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 775.328182] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a358b34-07b0-4b92-8ee9-28356c572725 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.336695] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2dd68c5-5597-4abc-adee-840f1e36cb32 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.366452] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee0ec88-7bf0-4ae1-825c-65475aa155bd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.373143] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e715d6-998d-4548-be72-da9559d99e90 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.385964] env[62923]: DEBUG nova.compute.provider_tree [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.456668] env[62923]: INFO nova.scheduler.client.report [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleted allocations for instance 9b334c72-11f5-4165-a350-09fe5487a9a0 [ 775.464921] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 775.471338] env[62923]: DEBUG nova.network.neutron [None 
req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Successfully created port: baa246a8-c6fe-472f-9a07-faacecad6be2 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 775.561499] env[62923]: DEBUG nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 775.892027] env[62923]: DEBUG nova.scheduler.client.report [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 775.965666] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2817bd26-38ec-4948-87ff-3f9fe2ae4cee tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "9b334c72-11f5-4165-a350-09fe5487a9a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 159.496s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.173814] env[62923]: DEBUG nova.compute.manager [req-74fa7194-df01-4061-9c34-db4d7f54423d req-30b2548a-59a6-4636-94b5-d66cc23d4f9b service nova] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Received event network-changed-baa246a8-c6fe-472f-9a07-faacecad6be2 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 776.174029] env[62923]: DEBUG nova.compute.manager [req-74fa7194-df01-4061-9c34-db4d7f54423d req-30b2548a-59a6-4636-94b5-d66cc23d4f9b service nova] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Refreshing instance network info cache due to event network-changed-baa246a8-c6fe-472f-9a07-faacecad6be2.
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 776.174245] env[62923]: DEBUG oslo_concurrency.lockutils [req-74fa7194-df01-4061-9c34-db4d7f54423d req-30b2548a-59a6-4636-94b5-d66cc23d4f9b service nova] Acquiring lock "refresh_cache-d65ce1f0-e9de-4fc8-828b-95aec5615f95" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.174446] env[62923]: DEBUG oslo_concurrency.lockutils [req-74fa7194-df01-4061-9c34-db4d7f54423d req-30b2548a-59a6-4636-94b5-d66cc23d4f9b service nova] Acquired lock "refresh_cache-d65ce1f0-e9de-4fc8-828b-95aec5615f95" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.174612] env[62923]: DEBUG nova.network.neutron [req-74fa7194-df01-4061-9c34-db4d7f54423d req-30b2548a-59a6-4636-94b5-d66cc23d4f9b service nova] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Refreshing network info cache for port baa246a8-c6fe-472f-9a07-faacecad6be2 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.394968] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.846s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.395641] env[62923]: ERROR nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 0d3638e8-403e-4555-a591-7607de7d759f, please check neutron logs for more information. 
[ 776.395641] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Traceback (most recent call last): [ 776.395641] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 776.395641] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] self.driver.spawn(context, instance, image_meta, [ 776.395641] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 776.395641] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 776.395641] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 776.395641] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] vm_ref = self.build_virtual_machine(instance, [ 776.395641] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 776.395641] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 776.395641] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 776.396072] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] for vif in network_info: [ 776.396072] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 776.396072] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] return self._sync_wrapper(fn, *args, **kwargs) [ 776.396072] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 776.396072] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] self.wait() [ 776.396072] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 776.396072] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] self[:] = self._gt.wait() [ 776.396072] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 776.396072] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] return self._exit_event.wait() [ 776.396072] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 776.396072] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] result = hub.switch() [ 776.396072] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
776.396072] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] return self.greenlet.switch() [ 776.396444] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 776.396444] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] result = function(*args, **kwargs) [ 776.396444] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 776.396444] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] return func(*args, **kwargs) [ 776.396444] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 776.396444] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] raise e [ 776.396444] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 776.396444] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] nwinfo = self.network_api.allocate_for_instance( [ 776.396444] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 776.396444] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] created_port_ids = self._update_ports_for_instance( [ 776.396444] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 776.396444] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] with excutils.save_and_reraise_exception(): [ 776.396444] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 776.396797] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] self.force_reraise() [ 776.396797] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 776.396797] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] raise self.value [ 776.396797] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 776.396797] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] updated_port = self._update_port( [ 776.396797] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 776.396797] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] _ensure_no_port_binding_failure(port) [ 776.396797] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 776.396797] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] raise exception.PortBindingFailed(port_id=port['id']) [ 776.396797] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] nova.exception.PortBindingFailed: Binding failed for port 0d3638e8-403e-4555-a591-7607de7d759f, please check neutron logs for more information. [ 776.396797] env[62923]: ERROR nova.compute.manager [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] [ 776.397113] env[62923]: DEBUG nova.compute.utils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Binding failed for port 0d3638e8-403e-4555-a591-7607de7d759f, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 776.397622] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.888s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.400450] env[62923]: DEBUG nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Build of instance bf0222ef-b86f-4d85-ab75-96661b90a4b4 was re-scheduled: Binding failed for port 0d3638e8-403e-4555-a591-7607de7d759f, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 776.400881] env[62923]: DEBUG nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 776.401114] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Acquiring lock "refresh_cache-bf0222ef-b86f-4d85-ab75-96661b90a4b4" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.401258] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Acquired lock "refresh_cache-bf0222ef-b86f-4d85-ab75-96661b90a4b4" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.401407] env[62923]: DEBUG nova.network.neutron [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 776.463489] env[62923]: ERROR nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port baa246a8-c6fe-472f-9a07-faacecad6be2, please check neutron logs for more information. 
[ 776.463489] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 776.463489] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 776.463489] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 776.463489] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 776.463489] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 776.463489] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 776.463489] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 776.463489] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 776.463489] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 776.463489] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 776.463489] env[62923]: ERROR nova.compute.manager raise self.value [ 776.463489] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 776.463489] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 776.463489] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 776.463489] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 776.463984] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 776.463984] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 776.463984] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port baa246a8-c6fe-472f-9a07-faacecad6be2, please check neutron logs for more information. 
[ 776.463984] env[62923]: ERROR nova.compute.manager [ 776.463984] env[62923]: Traceback (most recent call last): [ 776.463984] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 776.463984] env[62923]: listener.cb(fileno) [ 776.463984] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 776.463984] env[62923]: result = function(*args, **kwargs) [ 776.463984] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 776.463984] env[62923]: return func(*args, **kwargs) [ 776.463984] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 776.463984] env[62923]: raise e [ 776.463984] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 776.463984] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 776.463984] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 776.463984] env[62923]: created_port_ids = self._update_ports_for_instance( [ 776.463984] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 776.463984] env[62923]: with excutils.save_and_reraise_exception(): [ 776.463984] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 776.463984] env[62923]: self.force_reraise() [ 776.463984] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 776.463984] env[62923]: raise self.value [ 776.463984] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 776.463984] env[62923]: updated_port = self._update_port( [ 776.463984] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 776.463984] env[62923]: _ensure_no_port_binding_failure(port) [ 776.463984] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 776.463984] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 776.465440] env[62923]: nova.exception.PortBindingFailed: Binding failed for port baa246a8-c6fe-472f-9a07-faacecad6be2, please check neutron logs for more information. [ 776.465440] env[62923]: Removing descriptor: 21 [ 776.470140] env[62923]: DEBUG nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 776.571573] env[62923]: DEBUG nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 776.596916] env[62923]: DEBUG nova.virt.hardware [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 776.597173] env[62923]: DEBUG nova.virt.hardware [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 776.597328] env[62923]: DEBUG nova.virt.hardware [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 776.597497] env[62923]: DEBUG nova.virt.hardware [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 776.597640] env[62923]: DEBUG nova.virt.hardware [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 776.597782] env[62923]: DEBUG nova.virt.hardware [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 776.598036] env[62923]: DEBUG nova.virt.hardware [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 776.598144] env[62923]: DEBUG nova.virt.hardware [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 776.598323]
env[62923]: DEBUG nova.virt.hardware [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 776.598498] env[62923]: DEBUG nova.virt.hardware [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 776.598666] env[62923]: DEBUG nova.virt.hardware [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 776.599560] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b72a0f4-60b1-49a2-b4f0-03577987d164 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.607674] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbda9c3-160d-45ff-9731-1166313b1e0b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.620759] env[62923]: ERROR nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port baa246a8-c6fe-472f-9a07-faacecad6be2, please check neutron logs for more information. 
[ 776.620759] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Traceback (most recent call last): [ 776.620759] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 776.620759] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] yield resources [ 776.620759] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 776.620759] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] self.driver.spawn(context, instance, image_meta, [ 776.620759] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 776.620759] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] self._vmops.spawn(context, instance, image_meta, injected_files, [ 776.620759] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 776.620759] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] vm_ref = self.build_virtual_machine(instance, [ 776.620759] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 776.621182] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] vif_infos = vmwarevif.get_vif_info(self._session, [ 776.621182] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 776.621182] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] for vif in network_info: [ 776.621182] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 776.621182] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] return self._sync_wrapper(fn, *args, **kwargs) [ 776.621182] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 776.621182] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] self.wait() [ 776.621182] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 776.621182] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] self[:] = self._gt.wait() [ 776.621182] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 776.621182] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] return self._exit_event.wait() [ 776.621182] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 776.621182] env[62923]: ERROR 
nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] current.throw(*self._exc) [ 776.621609] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 776.621609] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] result = function(*args, **kwargs) [ 776.621609] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 776.621609] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] return func(*args, **kwargs) [ 776.621609] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 776.621609] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] raise e [ 776.621609] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 776.621609] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] nwinfo = self.network_api.allocate_for_instance( [ 776.621609] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 776.621609] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] created_port_ids = self._update_ports_for_instance( [ 776.621609] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 776.621609] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] with excutils.save_and_reraise_exception(): [ 776.621609] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 776.622032] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] self.force_reraise() [ 776.622032] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 776.622032] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] raise self.value [ 776.622032] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 776.622032] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] updated_port = self._update_port( [ 776.622032] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 776.622032] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] _ensure_no_port_binding_failure(port) [ 776.622032] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
776.622032] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] raise exception.PortBindingFailed(port_id=port['id']) [ 776.622032] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] nova.exception.PortBindingFailed: Binding failed for port baa246a8-c6fe-472f-9a07-faacecad6be2, please check neutron logs for more information. [ 776.622032] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] [ 776.622032] env[62923]: INFO nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Terminating instance [ 776.622985] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "refresh_cache-d65ce1f0-e9de-4fc8-828b-95aec5615f95" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.693788] env[62923]: DEBUG nova.network.neutron [req-74fa7194-df01-4061-9c34-db4d7f54423d req-30b2548a-59a6-4636-94b5-d66cc23d4f9b service nova] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.762758] env[62923]: DEBUG nova.network.neutron [req-74fa7194-df01-4061-9c34-db4d7f54423d req-30b2548a-59a6-4636-94b5-d66cc23d4f9b service nova] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.941181] env[62923]: DEBUG nova.network.neutron [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.961963] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.964632] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.965715] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.965715] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.965715] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.965715] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.965715] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62923) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 776.966274] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.988457] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.187320] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d253b513-f10b-4bf6-b36d-ce6f738b9268 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.197049] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b92ea6b-4529-4317-915a-ead9bd5e7ea5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.228234] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eaab0af-da22-452a-89fb-ef15c02eb291 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.234748] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bb0fa5-794e-4628-b762-f8bf99dbb831 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.255163] env[62923]: DEBUG nova.compute.provider_tree [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.265177] env[62923]: DEBUG oslo_concurrency.lockutils [req-74fa7194-df01-4061-9c34-db4d7f54423d req-30b2548a-59a6-4636-94b5-d66cc23d4f9b service nova] Releasing lock "refresh_cache-d65ce1f0-e9de-4fc8-828b-95aec5615f95" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.266094] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquired lock "refresh_cache-d65ce1f0-e9de-4fc8-828b-95aec5615f95" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.266366] env[62923]: DEBUG nova.network.neutron [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.275285] env[62923]: DEBUG nova.network.neutron [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] 
[instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.468862] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.759043] env[62923]: DEBUG nova.scheduler.client.report [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 777.777876] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Releasing lock "refresh_cache-bf0222ef-b86f-4d85-ab75-96661b90a4b4" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.778187] env[62923]: DEBUG nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 777.778393] env[62923]: DEBUG nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 777.778562] env[62923]: DEBUG nova.network.neutron [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 777.788526] env[62923]: DEBUG nova.network.neutron [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 777.796106] env[62923]: DEBUG nova.network.neutron [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 777.860494] env[62923]: DEBUG nova.network.neutron [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.965154] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "7c3edceb-cc58-4925-a97a-3204936c836d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.965390] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "7c3edceb-cc58-4925-a97a-3204936c836d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.201020] env[62923]: DEBUG nova.compute.manager [req-ed3fddae-03a6-4e35-9b8a-5110549ac1da req-9ce1d97c-46d7-4d38-93fa-093c0a2631d6 service nova] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Received event network-vif-deleted-baa246a8-c6fe-472f-9a07-faacecad6be2 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 778.263805] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.866s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.264511] env[62923]: ERROR nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 44334227-49c0-4835-986e-235c82f82d3d, please check neutron logs for more information. 
[ 778.264511] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Traceback (most recent call last): [ 778.264511] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 778.264511] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] self.driver.spawn(context, instance, image_meta, [ 778.264511] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 778.264511] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 778.264511] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 778.264511] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] vm_ref = self.build_virtual_machine(instance, [ 778.264511] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 778.264511] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] vif_infos = vmwarevif.get_vif_info(self._session, [ 778.264511] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 778.264942] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] for vif in network_info: [ 778.264942] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 778.264942] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] return self._sync_wrapper(fn, *args, **kwargs) [ 778.264942] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 778.264942] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] self.wait() [ 778.264942] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 778.264942] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] self[:] = self._gt.wait() [ 778.264942] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 778.264942] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] return self._exit_event.wait() [ 778.264942] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 778.264942] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] result = hub.switch() [ 778.264942] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
778.264942] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] return self.greenlet.switch() [ 778.265569] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 778.265569] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] result = function(*args, **kwargs) [ 778.265569] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 778.265569] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] return func(*args, **kwargs) [ 778.265569] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 778.265569] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] raise e [ 778.265569] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 778.265569] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] nwinfo = self.network_api.allocate_for_instance( [ 778.265569] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 778.265569] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] created_port_ids = self._update_ports_for_instance( [ 778.265569] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 778.265569] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] with excutils.save_and_reraise_exception(): [ 778.265569] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.266171] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] self.force_reraise() [ 778.266171] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.266171] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] raise self.value [ 778.266171] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 778.266171] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] updated_port = self._update_port( [ 778.266171] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.266171] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] _ensure_no_port_binding_failure(port) [ 778.266171] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 778.266171] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] raise exception.PortBindingFailed(port_id=port['id']) [ 778.266171] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] nova.exception.PortBindingFailed: Binding failed for port 44334227-49c0-4835-986e-235c82f82d3d, please check neutron logs for more information. [ 778.266171] env[62923]: ERROR nova.compute.manager [instance: ac14f710-41c0-429c-92a3-46acceace3fc] [ 778.266643] env[62923]: DEBUG nova.compute.utils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Binding failed for port 44334227-49c0-4835-986e-235c82f82d3d, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 778.266643] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.699s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.270617] env[62923]: DEBUG nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Build of instance ac14f710-41c0-429c-92a3-46acceace3fc was re-scheduled: Binding failed for port 44334227-49c0-4835-986e-235c82f82d3d, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 778.271239] env[62923]: DEBUG nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 778.271461] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "refresh_cache-ac14f710-41c0-429c-92a3-46acceace3fc" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.271607] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "refresh_cache-ac14f710-41c0-429c-92a3-46acceace3fc" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.271762] env[62923]: DEBUG nova.network.neutron [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 778.298870] env[62923]: DEBUG nova.network.neutron [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.362710] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Releasing lock "refresh_cache-d65ce1f0-e9de-4fc8-828b-95aec5615f95" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.363142] env[62923]: DEBUG nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 778.363337] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 778.363636] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0af37a8e-df10-4622-bfac-3f3c71792304 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.372418] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63927de0-b311-4e12-99cc-38aa13af40c0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.393053] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d65ce1f0-e9de-4fc8-828b-95aec5615f95 could not be found. [ 778.393295] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 778.393439] env[62923]: INFO nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Took 0.03 seconds to destroy the instance on the hypervisor. [ 778.394091] env[62923]: DEBUG oslo.service.loopingcall [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 778.394311] env[62923]: DEBUG nova.compute.manager [-] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 778.394458] env[62923]: DEBUG nova.network.neutron [-] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 778.413366] env[62923]: DEBUG nova.network.neutron [-] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.788283] env[62923]: DEBUG nova.network.neutron [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.801532] env[62923]: INFO nova.compute.manager [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] [instance: bf0222ef-b86f-4d85-ab75-96661b90a4b4] Took 1.02 seconds to deallocate network for instance. [ 778.873765] env[62923]: DEBUG nova.network.neutron [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.915815] env[62923]: DEBUG nova.network.neutron [-] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.993857] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe6721c-c8dc-47fe-8ec0-4e7335fbc720 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.001483] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb06f72-0522-47f2-b4b5-f8bf238da46b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.032118] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42829527-5dcc-4887-baf9-a36c0e2213ec {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.039023] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9167c909-d168-49c5-be52-1cbbb6b61064 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.051719] env[62923]: DEBUG nova.compute.provider_tree [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.377754] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "refresh_cache-ac14f710-41c0-429c-92a3-46acceace3fc" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.378051] env[62923]: DEBUG nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 779.378176] env[62923]: DEBUG nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 779.378335] env[62923]: DEBUG nova.network.neutron [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 779.397540] env[62923]: DEBUG nova.network.neutron [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.417802] env[62923]: INFO nova.compute.manager [-] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Took 1.02 seconds to deallocate network for instance. [ 779.422773] env[62923]: DEBUG nova.compute.claims [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 779.422946] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.555325] env[62923]: DEBUG nova.scheduler.client.report [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 779.833614] env[62923]: INFO nova.scheduler.client.report [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Deleted allocations for instance bf0222ef-b86f-4d85-ab75-96661b90a4b4 [ 779.900502] env[62923]: DEBUG nova.network.neutron [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.060950] env[62923]: DEBUG 
oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.794s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.061696] env[62923]: ERROR nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b7b683c3-cbd9-4f68-a369-ca8feccb9ba7, please check neutron logs for more information. [ 780.061696] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Traceback (most recent call last): [ 780.061696] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 780.061696] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] self.driver.spawn(context, instance, image_meta, [ 780.061696] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 780.061696] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 780.061696] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 780.061696] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] vm_ref = self.build_virtual_machine(instance, [ 780.061696] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 780.061696] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] vif_infos = vmwarevif.get_vif_info(self._session, [ 780.061696] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 780.062189] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] for vif in network_info: [ 780.062189] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 780.062189] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] return self._sync_wrapper(fn, *args, **kwargs) [ 780.062189] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 780.062189] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] self.wait() [ 780.062189] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 780.062189] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] self[:] = self._gt.wait() [ 780.062189] env[62923]: ERROR 
nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 780.062189] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] return self._exit_event.wait() [ 780.062189] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 780.062189] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] result = hub.switch() [ 780.062189] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 780.062189] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] return self.greenlet.switch() [ 780.062632] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 780.062632] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] result = function(*args, **kwargs) [ 780.062632] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 780.062632] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] return func(*args, **kwargs) [ 780.062632] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 780.062632] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] raise e [ 780.062632] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 780.062632] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] nwinfo = self.network_api.allocate_for_instance( [ 780.062632] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 780.062632] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] created_port_ids = self._update_ports_for_instance( [ 780.062632] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 780.062632] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] with excutils.save_and_reraise_exception(): [ 780.062632] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 780.063064] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] self.force_reraise() [ 780.063064] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 780.063064] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] raise 
self.value [ 780.063064] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 780.063064] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] updated_port = self._update_port( [ 780.063064] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 780.063064] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] _ensure_no_port_binding_failure(port) [ 780.063064] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 780.063064] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] raise exception.PortBindingFailed(port_id=port['id']) [ 780.063064] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] nova.exception.PortBindingFailed: Binding failed for port b7b683c3-cbd9-4f68-a369-ca8feccb9ba7, please check neutron logs for more information. [ 780.063064] env[62923]: ERROR nova.compute.manager [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] [ 780.063422] env[62923]: DEBUG nova.compute.utils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Binding failed for port b7b683c3-cbd9-4f68-a369-ca8feccb9ba7, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 780.063582] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.117s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.067060] env[62923]: DEBUG nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Build of instance 78daba16-0c0f-4db6-bde1-70d960a6e7ae was re-scheduled: Binding failed for port b7b683c3-cbd9-4f68-a369-ca8feccb9ba7, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 780.067060] env[62923]: DEBUG nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 780.067362] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Acquiring lock "refresh_cache-78daba16-0c0f-4db6-bde1-70d960a6e7ae" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.067404] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Acquired lock "refresh_cache-78daba16-0c0f-4db6-bde1-70d960a6e7ae" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.067582] env[62923]: DEBUG nova.network.neutron [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 780.341926] env[62923]: DEBUG oslo_concurrency.lockutils [None req-54e36c14-5ff3-4bf1-b869-0a84c38546c6 tempest-ServerRescueTestJSON-1010897544 tempest-ServerRescueTestJSON-1010897544-project-member] Lock "bf0222ef-b86f-4d85-ab75-96661b90a4b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 150.163s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.402844] env[62923]: INFO nova.compute.manager [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: ac14f710-41c0-429c-92a3-46acceace3fc] Took 1.02 seconds to deallocate network for instance. [ 780.588847] env[62923]: DEBUG nova.network.neutron [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 780.696623] env[62923]: DEBUG nova.network.neutron [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.806028] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e7d65e-f6a3-4587-a5ea-e48a67683a56 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.815459] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a3dff2-2b5c-44bc-a81e-5a76fa47e2a8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.845713] env[62923]: DEBUG nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 780.853114] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7074fa7-4c63-4a8c-b850-5a51c8cdb293 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.858222] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d563cfe-3235-4a6a-8535-0ae13702bf11 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.873233] env[62923]: DEBUG nova.compute.provider_tree [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.199301] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Releasing lock "refresh_cache-78daba16-0c0f-4db6-bde1-70d960a6e7ae" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.199542] env[62923]: DEBUG nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 781.199727] env[62923]: DEBUG nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 781.199889] env[62923]: DEBUG nova.network.neutron [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 781.218393] env[62923]: DEBUG nova.network.neutron [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.370067] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.376080] env[62923]: DEBUG nova.scheduler.client.report [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 781.429321] env[62923]: INFO nova.scheduler.client.report [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleted allocations for instance ac14f710-41c0-429c-92a3-46acceace3fc [ 781.721257] env[62923]: DEBUG nova.network.neutron [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.882381] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.817s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.882381] env[62923]: ERROR nova.compute.manager [None 
req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port db91e8e4-4ec7-42cf-a81e-46717ae18c5b, please check neutron logs for more information. [ 781.882381] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Traceback (most recent call last): [ 781.882381] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 781.882381] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] self.driver.spawn(context, instance, image_meta, [ 781.882381] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 781.882381] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] self._vmops.spawn(context, instance, image_meta, injected_files, [ 781.882381] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 781.882381] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] vm_ref = self.build_virtual_machine(instance, [ 781.882705] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 781.882705] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] vif_infos = vmwarevif.get_vif_info(self._session, [ 781.882705] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 781.882705] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] for vif in network_info: [ 781.882705] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 781.882705] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] return self._sync_wrapper(fn, *args, **kwargs) [ 781.882705] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 781.882705] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] self.wait() [ 781.882705] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 781.882705] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] self[:] = self._gt.wait() [ 781.882705] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 781.882705] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] return self._exit_event.wait() [ 781.882705] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 781.883077] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] result = hub.switch() [ 781.883077] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 781.883077] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] return self.greenlet.switch() [ 781.883077] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 781.883077] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] result = function(*args, **kwargs) [ 781.883077] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 781.883077] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] return func(*args, **kwargs) [ 781.883077] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 781.883077] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] raise e [ 781.883077] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 781.883077] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] nwinfo = self.network_api.allocate_for_instance( [ 781.883077] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 781.883077] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] created_port_ids = self._update_ports_for_instance( [ 781.883427] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 781.883427] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] with excutils.save_and_reraise_exception(): [ 781.883427] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 781.883427] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] self.force_reraise() [ 781.883427] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 781.883427] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] raise self.value [ 781.883427] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 781.883427] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] updated_port = self._update_port( [ 781.883427] env[62923]: ERROR nova.compute.manager [instance: 
7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 781.883427] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] _ensure_no_port_binding_failure(port) [ 781.883427] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 781.883427] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] raise exception.PortBindingFailed(port_id=port['id']) [ 781.883759] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] nova.exception.PortBindingFailed: Binding failed for port db91e8e4-4ec7-42cf-a81e-46717ae18c5b, please check neutron logs for more information. [ 781.883759] env[62923]: ERROR nova.compute.manager [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] [ 781.883759] env[62923]: DEBUG nova.compute.utils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Binding failed for port db91e8e4-4ec7-42cf-a81e-46717ae18c5b, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 781.883857] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.936s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.885900] env[62923]: INFO nova.compute.claims [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 781.888823] env[62923]: DEBUG nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Build of instance 7831bfb8-b336-4338-923f-c759a5c67c06 was re-scheduled: Binding failed for port db91e8e4-4ec7-42cf-a81e-46717ae18c5b, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 781.889329] env[62923]: DEBUG nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 781.889778] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Acquiring lock "refresh_cache-7831bfb8-b336-4338-923f-c759a5c67c06" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.889851] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Acquired lock "refresh_cache-7831bfb8-b336-4338-923f-c759a5c67c06" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.890207] env[62923]: DEBUG nova.network.neutron [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 781.938875] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a7c7ec6c-c6a9-441a-bae5-aaada85ff359 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "ac14f710-41c0-429c-92a3-46acceace3fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 150.311s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.225124] env[62923]: INFO nova.compute.manager [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] [instance: 78daba16-0c0f-4db6-bde1-70d960a6e7ae] Took 1.02 seconds to deallocate network for instance. [ 782.415411] env[62923]: DEBUG nova.network.neutron [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 782.441363] env[62923]: DEBUG nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 782.522155] env[62923]: DEBUG nova.network.neutron [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.008869] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.024320] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Releasing lock "refresh_cache-7831bfb8-b336-4338-923f-c759a5c67c06" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.024598] env[62923]: DEBUG nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 783.024771] env[62923]: DEBUG nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 783.024926] env[62923]: DEBUG nova.network.neutron [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 783.044715] env[62923]: DEBUG nova.network.neutron [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.252930] env[62923]: INFO nova.scheduler.client.report [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Deleted allocations for instance 78daba16-0c0f-4db6-bde1-70d960a6e7ae [ 783.272114] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a423b131-850a-4c01-be4d-7f77c97a3bd9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.280200] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7ec1c9-de7d-494c-b016-60c20fb98bc4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.312240] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f967dc0e-d6fb-4023-bb2e-47c98ce9a10a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.319995] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e338c08e-b7b1-4115-b344-b21fa51cc136 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.333258] env[62923]: DEBUG nova.compute.provider_tree [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.547941] env[62923]: DEBUG nova.network.neutron [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.599887] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "880cce70-5a0c-40a6-91b5-73d074feab6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.600130] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "880cce70-5a0c-40a6-91b5-73d074feab6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.764628] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d81e160-a5d5-48a4-9d72-68104095d7d6 tempest-ServerMetadataTestJSON-1643729542 tempest-ServerMetadataTestJSON-1643729542-project-member] Lock "78daba16-0c0f-4db6-bde1-70d960a6e7ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
held 150.716s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.837056] env[62923]: DEBUG nova.scheduler.client.report [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 784.051598] env[62923]: INFO nova.compute.manager [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] [instance: 7831bfb8-b336-4338-923f-c759a5c67c06] Took 1.03 seconds to deallocate network for instance. [ 784.266463] env[62923]: DEBUG nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 784.341066] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.457s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.341603] env[62923]: DEBUG nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 784.344194] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.501s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.345973] env[62923]: INFO nova.compute.claims [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 784.795366] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.850097] env[62923]: DEBUG nova.compute.utils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 784.854558] env[62923]: DEBUG nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 784.856975] env[62923]: DEBUG nova.network.neutron [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 784.928646] env[62923]: DEBUG nova.policy [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd2eb4fc8d9b47db8b8384731aff11b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '035c2eb849e3432e8cb52d31d69b895b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 785.084016] env[62923]: INFO nova.scheduler.client.report [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Deleted allocations for instance 7831bfb8-b336-4338-923f-c759a5c67c06 [ 785.358441] env[62923]: DEBUG nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 785.372900] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquiring lock "92c59517-7e6f-45bd-8211-789a718d66d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.373137] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Lock "92c59517-7e6f-45bd-8211-789a718d66d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.387455] env[62923]: DEBUG nova.scheduler.client.report [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 785.390768] env[62923]: DEBUG nova.network.neutron [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Successfully created port: 5de6e97b-085e-43cc-9bee-0101cf0d68d8 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.406297] env[62923]: DEBUG nova.scheduler.client.report [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 785.406892] env[62923]: DEBUG nova.compute.provider_tree [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 785.423103] env[62923]: DEBUG nova.scheduler.client.report [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Refreshing aggregate associations for resource provider 
a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 785.445551] env[62923]: DEBUG nova.scheduler.client.report [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 785.598235] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ea63a99-fcda-42cd-979e-37b39a8502e1 tempest-ServerAddressesNegativeTestJSON-347589147 tempest-ServerAddressesNegativeTestJSON-347589147-project-member] Lock "7831bfb8-b336-4338-923f-c759a5c67c06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.094s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.690870] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac30c6f8-e070-4cd5-a926-0d6fa7d88219 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.698759] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8500f09-e6f6-49af-8f10-e89686ce71fe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.730455] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9897b58-b74a-45d7-bc6b-fa0aab71d4d7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.738052] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c26ab51-ac58-447f-8535-8ab5c58def94 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.751420] env[62923]: DEBUG nova.compute.provider_tree [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.100524] env[62923]: DEBUG nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 786.258352] env[62923]: DEBUG nova.scheduler.client.report [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 786.308561] env[62923]: DEBUG nova.compute.manager [req-f74f5428-b038-4c71-bda5-a93a6d9031aa req-1493e815-5568-47ba-be94-ba729f1745f0 service nova] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Received event network-changed-5de6e97b-085e-43cc-9bee-0101cf0d68d8 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 786.309132] env[62923]: DEBUG nova.compute.manager [req-f74f5428-b038-4c71-bda5-a93a6d9031aa req-1493e815-5568-47ba-be94-ba729f1745f0 service nova] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Refreshing instance network info cache due to event network-changed-5de6e97b-085e-43cc-9bee-0101cf0d68d8. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 786.309132] env[62923]: DEBUG oslo_concurrency.lockutils [req-f74f5428-b038-4c71-bda5-a93a6d9031aa req-1493e815-5568-47ba-be94-ba729f1745f0 service nova] Acquiring lock "refresh_cache-f76d2304-7a4e-4f18-80de-ecb0b67bec28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.309298] env[62923]: DEBUG oslo_concurrency.lockutils [req-f74f5428-b038-4c71-bda5-a93a6d9031aa req-1493e815-5568-47ba-be94-ba729f1745f0 service nova] Acquired lock "refresh_cache-f76d2304-7a4e-4f18-80de-ecb0b67bec28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.309462] env[62923]: DEBUG nova.network.neutron [req-f74f5428-b038-4c71-bda5-a93a6d9031aa req-1493e815-5568-47ba-be94-ba729f1745f0 service nova] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Refreshing network info cache for port 5de6e97b-085e-43cc-9bee-0101cf0d68d8 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 786.370562] env[62923]: DEBUG nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 786.397527] env[62923]: DEBUG nova.virt.hardware [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 786.397769] env[62923]: DEBUG nova.virt.hardware [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 786.398831] env[62923]: DEBUG nova.virt.hardware [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 786.398831] env[62923]: DEBUG nova.virt.hardware [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 786.398831] env[62923]: DEBUG nova.virt.hardware [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 786.398831] env[62923]: DEBUG nova.virt.hardware [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 786.398831] env[62923]: DEBUG nova.virt.hardware [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 786.399104] env[62923]: DEBUG nova.virt.hardware [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 786.399104] 
env[62923]: DEBUG nova.virt.hardware [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 786.399104] env[62923]: DEBUG nova.virt.hardware [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 786.399857] env[62923]: DEBUG nova.virt.hardware [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 786.400192] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f5e900-74a9-499c-a084-4209b6a5beb2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.408364] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9510f85-7bdb-48c1-a7d8-5e878d3b8515 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.462107] env[62923]: ERROR nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5de6e97b-085e-43cc-9bee-0101cf0d68d8, please check neutron logs for more information. 
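[Annotation] For context on the PortBindingFailed raised above: Neutron reports the ML2 binding outcome on the port resource itself, and Nova treats a port whose binding:vif_type comes back as 'binding_failed' as fatal. A minimal sketch of that check, paraphrasing _ensure_no_port_binding_failure in nova/network/neutron.py (the exception class below is a local stand-in, not nova.exception):

    # Sketch of the check that produces the PortBindingFailed above.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron sets on a failed binding


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f'Binding failed for port {port_id}, please '
                             f'check neutron logs for more information.')


    def ensure_no_port_binding_failure(port: dict) -> None:
        # Neutron exposes the binding result on the port; 'binding_failed'
        # means no mechanism driver could bind it on the requested host.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    try:
        ensure_no_port_binding_failure(
            {'id': '5de6e97b-085e-43cc-9bee-0101cf0d68d8',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)

The compute side only sees the failed binding, not its cause, so the neutron-server log for this port ID is the place to look next.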
[ 786.462107] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 786.462107] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.462107] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 786.462107] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.462107] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 786.462107] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.462107] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 786.462107] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.462107] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 786.462107] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.462107] env[62923]: ERROR nova.compute.manager raise self.value [ 786.462107] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.462107] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 786.462107] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.462107] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 786.462672] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 786.462672] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 786.462672] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5de6e97b-085e-43cc-9bee-0101cf0d68d8, please check neutron logs for more information. 
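[Annotation] The save_and_reraise_exception frames in that traceback come from oslo.utils: the context manager lets cleanup run inside an except block while guaranteeing the original exception still propagates. A minimal usage sketch; bind_port and the rollback message are hypothetical stand-ins for the Neutron port update and its cleanup:

    import logging

    from oslo_utils import excutils

    logging.basicConfig(level=logging.INFO)
    LOG = logging.getLogger(__name__)


    def bind_port(port_id):
        # Hypothetical failure, standing in for a 'binding_failed' result.
        raise RuntimeError(f'binding failed for {port_id}')


    def update_ports(port_ids):
        created = []
        for port_id in port_ids:
            try:
                bind_port(port_id)
                created.append(port_id)
            except Exception:
                # On leaving the with-block the original exception is
                # re-raised automatically; the body is the rollback.
                with excutils.save_and_reraise_exception():
                    LOG.info('Rolling back %d created port(s)', len(created))


    try:
        update_ports(['5de6e97b-085e-43cc-9bee-0101cf0d68d8'])
    except RuntimeError as exc:
        print(f'propagated: {exc}')

This is why the traceback shows force_reraise() and raise self.value between the two neutron.py frames: the exception is caught, saved across the cleanup, and re-raised.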
[ 786.462672] env[62923]: ERROR nova.compute.manager [ 786.462672] env[62923]: Traceback (most recent call last): [ 786.462672] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 786.462672] env[62923]: listener.cb(fileno) [ 786.462672] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 786.462672] env[62923]: result = function(*args, **kwargs) [ 786.462672] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 786.462672] env[62923]: return func(*args, **kwargs) [ 786.462672] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 786.462672] env[62923]: raise e [ 786.462672] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.462672] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 786.462672] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.462672] env[62923]: created_port_ids = self._update_ports_for_instance( [ 786.462672] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.462672] env[62923]: with excutils.save_and_reraise_exception(): [ 786.462672] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.462672] env[62923]: self.force_reraise() [ 786.462672] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.462672] env[62923]: raise self.value [ 786.462672] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.462672] env[62923]: updated_port = self._update_port( [ 786.462672] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.462672] env[62923]: _ensure_no_port_binding_failure(port) [ 786.462672] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 786.462672] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 786.463581] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 5de6e97b-085e-43cc-9bee-0101cf0d68d8, please check neutron logs for more information. [ 786.463581] env[62923]: Removing descriptor: 21 [ 786.463581] env[62923]: ERROR nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5de6e97b-085e-43cc-9bee-0101cf0d68d8, please check neutron logs for more information. 
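[Annotation] The eventlet frames show how the failure crosses threads: Nova allocates networking in a background greenthread and hands the build path a wrapper that only materializes the result when first iterated, so the allocation error surfaces later, inside the driver's spawn, at the "for vif in network_info" step. A stripped-down sketch of that pattern, assuming eventlet is installed (the wrapper is illustrative, not Nova's NetworkInfoAsyncWrapper):

    import eventlet


    class AsyncResult:
        """Deferred result: exceptions raised in the worker resurface on wait()."""

        def __init__(self, fn, *args):
            self._gt = eventlet.spawn(fn, *args)

        def wait(self):
            # GreenThread.wait() returns the worker's result or re-raises
            # whatever the worker raised; that is the self._gt.wait()
            # frame in the traceback above.
            return self._gt.wait()


    def allocate_network(port_id):
        raise RuntimeError(f'Binding failed for port {port_id}')


    pending = AsyncResult(allocate_network,
                          '5de6e97b-085e-43cc-9bee-0101cf0d68d8')
    try:
        pending.wait()  # first use, long after the worker actually failed
    except RuntimeError as exc:
        print(f'surfaced at first use: {exc}')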
[ 786.463581] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Traceback (most recent call last): [ 786.463581] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 786.463581] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] yield resources [ 786.463581] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 786.463581] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] self.driver.spawn(context, instance, image_meta, [ 786.463581] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 786.463581] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] self._vmops.spawn(context, instance, image_meta, injected_files, [ 786.463581] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 786.463581] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] vm_ref = self.build_virtual_machine(instance, [ 786.464068] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 786.464068] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] vif_infos = vmwarevif.get_vif_info(self._session, [ 786.464068] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 786.464068] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] for vif in network_info: [ 786.464068] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 786.464068] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] return self._sync_wrapper(fn, *args, **kwargs) [ 786.464068] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 786.464068] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] self.wait() [ 786.464068] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 786.464068] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] self[:] = self._gt.wait() [ 786.464068] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 786.464068] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] return self._exit_event.wait() [ 786.464068] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 786.464554] env[62923]: ERROR 
nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] result = hub.switch() [ 786.464554] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 786.464554] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] return self.greenlet.switch() [ 786.464554] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 786.464554] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] result = function(*args, **kwargs) [ 786.464554] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 786.464554] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] return func(*args, **kwargs) [ 786.464554] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 786.464554] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] raise e [ 786.464554] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.464554] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] nwinfo = self.network_api.allocate_for_instance( [ 786.464554] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 786.464554] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] created_port_ids = self._update_ports_for_instance( [ 786.464979] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 786.464979] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] with excutils.save_and_reraise_exception(): [ 786.464979] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.464979] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] self.force_reraise() [ 786.464979] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.464979] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] raise self.value [ 786.464979] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 786.464979] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] updated_port = self._update_port( [ 786.464979] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.464979] 
env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] _ensure_no_port_binding_failure(port) [ 786.464979] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 786.464979] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] raise exception.PortBindingFailed(port_id=port['id']) [ 786.465397] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] nova.exception.PortBindingFailed: Binding failed for port 5de6e97b-085e-43cc-9bee-0101cf0d68d8, please check neutron logs for more information. [ 786.465397] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] [ 786.465397] env[62923]: INFO nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Terminating instance [ 786.465397] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquiring lock "refresh_cache-f76d2304-7a4e-4f18-80de-ecb0b67bec28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.632160] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.763071] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.419s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.763595] env[62923]: DEBUG nova.compute.manager [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 786.766179] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.468s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.830893] env[62923]: DEBUG nova.network.neutron [req-f74f5428-b038-4c71-bda5-a93a6d9031aa req-1493e815-5568-47ba-be94-ba729f1745f0 service nova] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.957432] env[62923]: DEBUG nova.network.neutron [req-f74f5428-b038-4c71-bda5-a93a6d9031aa req-1493e815-5568-47ba-be94-ba729f1745f0 service nova] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.274946] env[62923]: DEBUG nova.compute.utils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 787.277464] env[62923]: DEBUG nova.compute.manager [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Not allocating networking since 'none' was specified. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 787.465613] env[62923]: DEBUG oslo_concurrency.lockutils [req-f74f5428-b038-4c71-bda5-a93a6d9031aa req-1493e815-5568-47ba-be94-ba729f1745f0 service nova] Releasing lock "refresh_cache-f76d2304-7a4e-4f18-80de-ecb0b67bec28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.466068] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquired lock "refresh_cache-f76d2304-7a4e-4f18-80de-ecb0b67bec28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.466273] env[62923]: DEBUG nova.network.neutron [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.517840] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a15519-4b48-42f3-8bcc-e2084fd80a0f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.525381] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d060d5-b04f-4e9f-bee7-e55ab50c0d5d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.283615] env[62923]: DEBUG nova.compute.manager [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 788.288688] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d0abf8-5f63-47ed-93da-8be841e31540 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.296639] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037fe7d3-fba8-4100-9211-ba161c9d8f9b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.309608] env[62923]: DEBUG nova.compute.provider_tree [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 788.311369] env[62923]: DEBUG nova.network.neutron [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.338154] env[62923]: DEBUG nova.compute.manager [req-6249262d-b107-488c-b2a0-1d58c678e829 req-2c018bca-42f1-429a-b234-54ecc834d9aa service nova] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Received event network-vif-deleted-5de6e97b-085e-43cc-9bee-0101cf0d68d8 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 788.544836] env[62923]: DEBUG nova.network.neutron [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.814729] env[62923]: DEBUG nova.scheduler.client.report [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 789.047440] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Releasing lock "refresh_cache-f76d2304-7a4e-4f18-80de-ecb0b67bec28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.047908] env[62923]: DEBUG nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Start destroying 
the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 789.048122] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 789.048432] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94894fed-e54e-422e-9d94-4fe827edfe7b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.057643] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b887bd3-34a3-4adf-b9fc-79f977f05833 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.078296] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f76d2304-7a4e-4f18-80de-ecb0b67bec28 could not be found. [ 789.078897] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 789.078897] env[62923]: INFO nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Took 0.03 seconds to destroy the instance on the hypervisor. [ 789.078897] env[62923]: DEBUG oslo.service.loopingcall [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 789.079076] env[62923]: DEBUG nova.compute.manager [-] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 789.079170] env[62923]: DEBUG nova.network.neutron [-] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 789.092992] env[62923]: DEBUG nova.network.neutron [-] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.291401] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "94d2670f-d858-437a-a166-d148a57e07ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.291401] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "94d2670f-d858-437a-a166-d148a57e07ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.295703] env[62923]: DEBUG nova.compute.manager [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 789.320599] env[62923]: DEBUG nova.virt.hardware [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 789.320821] env[62923]: DEBUG nova.virt.hardware [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 789.320969] env[62923]: DEBUG nova.virt.hardware [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 789.321161] env[62923]: DEBUG nova.virt.hardware [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 789.321340] env[62923]: DEBUG nova.virt.hardware [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Image 
pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 789.321440] env[62923]: DEBUG nova.virt.hardware [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 789.321637] env[62923]: DEBUG nova.virt.hardware [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 789.321787] env[62923]: DEBUG nova.virt.hardware [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 789.321942] env[62923]: DEBUG nova.virt.hardware [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 789.322110] env[62923]: DEBUG nova.virt.hardware [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 789.322274] env[62923]: DEBUG nova.virt.hardware [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 789.322942] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.557s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.323516] env[62923]: ERROR nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 165fb841-918f-4749-b4f0-ea003000049f, please check neutron logs for more information. 
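[Annotation] The abort_instance_claim release above ("compute_resources" held 2.557s) returns what the failed build had claimed, and claims are checked against the inventory payloads logged repeatedly for provider a513b783-544c-421b-85ec-cfd6d6ee698d. Per resource class, Placement treats (total - reserved) * allocation_ratio as the claimable capacity, while max_unit caps any single allocation. A quick check with the values copied from the log:

    # Claimable capacity per resource class: (total - reserved) * allocation_ratio.
    # Values copied from the inventory dicts logged for this provider.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0,
                 'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512,
                      'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB': {'total': 400, 'reserved': 0,
                    'allocation_ratio': 1.0, 'max_unit': 148},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} claimable, max {inv['max_unit']} per allocation")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

So the reported total of 48 VCPUs is offered as 192 claimable units at the 4.0 overcommit ratio, which is why these failed builds release their claims promptly rather than exhausting the provider.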
[ 789.323516] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Traceback (most recent call last): [ 789.323516] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 789.323516] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] self.driver.spawn(context, instance, image_meta, [ 789.323516] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 789.323516] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 789.323516] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 789.323516] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] vm_ref = self.build_virtual_machine(instance, [ 789.323516] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 789.323516] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] vif_infos = vmwarevif.get_vif_info(self._session, [ 789.323516] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 789.323783] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] for vif in network_info: [ 789.323783] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 789.323783] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] return self._sync_wrapper(fn, *args, **kwargs) [ 789.323783] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 789.323783] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] self.wait() [ 789.323783] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 789.323783] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] self[:] = self._gt.wait() [ 789.323783] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 789.323783] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] return self._exit_event.wait() [ 789.323783] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 789.323783] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] current.throw(*self._exc) [ 789.323783] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
789.323783] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] result = function(*args, **kwargs) [ 789.324092] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 789.324092] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] return func(*args, **kwargs) [ 789.324092] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 789.324092] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] raise e [ 789.324092] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 789.324092] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] nwinfo = self.network_api.allocate_for_instance( [ 789.324092] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 789.324092] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] created_port_ids = self._update_ports_for_instance( [ 789.324092] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 789.324092] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] with excutils.save_and_reraise_exception(): [ 789.324092] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 789.324092] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] self.force_reraise() [ 789.324092] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 789.324387] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] raise self.value [ 789.324387] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 789.324387] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] updated_port = self._update_port( [ 789.324387] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 789.324387] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] _ensure_no_port_binding_failure(port) [ 789.324387] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 789.324387] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] raise exception.PortBindingFailed(port_id=port['id']) [ 789.324387] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] nova.exception.PortBindingFailed: Binding failed for 
port 165fb841-918f-4749-b4f0-ea003000049f, please check neutron logs for more information. [ 789.324387] env[62923]: ERROR nova.compute.manager [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] [ 789.324387] env[62923]: DEBUG nova.compute.utils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Binding failed for port 165fb841-918f-4749-b4f0-ea003000049f, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 789.325662] env[62923]: DEBUG nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Build of instance c22aa745-0e4a-40fd-903f-edba79cbf88b was re-scheduled: Binding failed for port 165fb841-918f-4749-b4f0-ea003000049f, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 789.326084] env[62923]: DEBUG nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 789.326307] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "refresh_cache-c22aa745-0e4a-40fd-903f-edba79cbf88b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.326451] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquired lock "refresh_cache-c22aa745-0e4a-40fd-903f-edba79cbf88b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.326641] env[62923]: DEBUG nova.network.neutron [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.328093] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2753ec-0b0e-4e29-b3f7-523a0a9b8dad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.331245] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.107s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.340120] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee2fe39-321c-4c3c-9921-5e8d58de2d81 
{{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.353151] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Instance VIF info [] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 789.358474] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Creating folder: Project (acbb73e2cf5447c1a7582648f35480f7). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 789.358713] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8b3df1f-cba6-4550-af20-5f47ce7d0f52 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.368096] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Created folder: Project (acbb73e2cf5447c1a7582648f35480f7) in parent group-v291405. [ 789.368270] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Creating folder: Instances. Parent ref: group-v291418. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 789.368473] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4265e89c-00c8-4b84-9cc4-dcfae5b107bf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.377781] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Created folder: Instances in parent group-v291418. [ 789.377995] env[62923]: DEBUG oslo.service.loopingcall [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 789.378189] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 789.378816] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ddc08b3-1183-44ec-b881-a07dce207f07 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.393905] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 789.393905] env[62923]: value = "task-1369873" [ 789.393905] env[62923]: _type = "Task" [ 789.393905] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.400802] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369873, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.453723] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "67a83e64-c8bd-499c-895a-11976d69195b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.453948] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "67a83e64-c8bd-499c-895a-11976d69195b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.597885] env[62923]: DEBUG nova.network.neutron [-] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.845717] env[62923]: DEBUG nova.network.neutron [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.902932] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369873, 'name': CreateVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.950645] env[62923]: DEBUG nova.network.neutron [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.087621] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75c848b-c0b0-460a-a977-ccd2cd60182a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.095451] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0554b6a4-61de-413d-bea6-36f7c598b110 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.100540] env[62923]: INFO nova.compute.manager [-] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Took 1.02 seconds to deallocate network for instance. 
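The CreateVM_Task lines above and below ('progress is 0%.', 'progress is 99%.', then 'completed successfully' with a duration_secs figure) come from oslo.vmware's task polling (wait_for_task/_poll_task in oslo_vmware/api.py). Below is a rough single-threaded sketch of that poll loop under stated assumptions: get_task_info is a stand-in accessor and the state strings are simplified, whereas the real code drives the loop through an oslo.service looping call and reads vSphere TaskInfo objects:

    import time

    def wait_for_task(get_task_info, interval=0.5):
        # Poll until the task leaves the running states; each pass through the
        # running branch produces one "progress is N%" line like the log's.
        while True:
            info = get_task_info()
            if info['state'] in ('queued', 'running'):
                print("Task: {'id': %(id)s, 'name': %(name)s} progress is %(p)s%%."
                      % {'id': info['id'], 'name': info['name'],
                         'p': info.get('progress', 0)})
            elif info['state'] == 'success':
                print("Task: {'id': %s, 'name': %s} completed successfully."
                      % (info['id'], info['name']))
                return info.get('result')
            else:
                raise RuntimeError('task %s failed' % info['id'])
            time.sleep(interval)

    # Stubbed TaskInfo sequence mirroring task-1369873 above:
    states = iter([
        {'id': 'task-1369873', 'name': 'CreateVM_Task', 'state': 'running', 'progress': 0},
        {'id': 'task-1369873', 'name': 'CreateVM_Task', 'state': 'running', 'progress': 99},
        {'id': 'task-1369873', 'name': 'CreateVM_Task', 'state': 'success', 'result': 'vm-1'},
    ])
    wait_for_task(lambda: next(states), interval=0.01)

The long tail at 99% is normal for vSphere tasks: progress jumps quickly, then the loop keeps polling until the server flips the state to success, which is where the 1.245886s duration logged below comes from.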
[ 790.126124] env[62923]: DEBUG nova.compute.claims [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 790.126312] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.127034] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4acf302-2bcc-4079-afba-ecb962999ed0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.133995] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4a0cec-01bd-4e88-af91-5e5851bfe193 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.147134] env[62923]: DEBUG nova.compute.provider_tree [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.403942] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369873, 'name': CreateVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.452964] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Releasing lock "refresh_cache-c22aa745-0e4a-40fd-903f-edba79cbf88b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.453211] env[62923]: DEBUG nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 790.453387] env[62923]: DEBUG nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 790.453549] env[62923]: DEBUG nova.network.neutron [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 790.467860] env[62923]: DEBUG nova.network.neutron [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.650356] env[62923]: DEBUG nova.scheduler.client.report [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 790.904701] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369873, 'name': CreateVM_Task, 'duration_secs': 1.245886} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.904865] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 790.905289] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.905466] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.905839] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 790.906096] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dd1537b-1779-4949-b30a-0ebc1af85118 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.910205] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 790.910205] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52674207-24fa-73ef-ba25-9a1aaabe7692" [ 790.910205] env[62923]: _type = "Task" [ 790.910205] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.917344] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52674207-24fa-73ef-ba25-9a1aaabe7692, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.970481] env[62923]: DEBUG nova.network.neutron [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.155312] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.824s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.155954] env[62923]: ERROR nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4d97090d-1208-4f22-87ac-2148a63b69d1, please check neutron logs for more information. [ 791.155954] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Traceback (most recent call last): [ 791.155954] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 791.155954] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] self.driver.spawn(context, instance, image_meta, [ 791.155954] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 791.155954] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] self._vmops.spawn(context, instance, image_meta, injected_files, [ 791.155954] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 791.155954] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] vm_ref = self.build_virtual_machine(instance, [ 791.155954] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 791.155954] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] vif_infos = vmwarevif.get_vif_info(self._session, [ 791.155954] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 791.156265] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] for vif in network_info: [ 791.156265] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 791.156265] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] return self._sync_wrapper(fn, *args, **kwargs) [ 791.156265] env[62923]: ERROR nova.compute.manager [instance: 
71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 791.156265] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] self.wait() [ 791.156265] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 791.156265] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] self[:] = self._gt.wait() [ 791.156265] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 791.156265] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] return self._exit_event.wait() [ 791.156265] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 791.156265] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] result = hub.switch() [ 791.156265] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 791.156265] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] return self.greenlet.switch() [ 791.156579] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 791.156579] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] result = function(*args, **kwargs) [ 791.156579] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 791.156579] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] return func(*args, **kwargs) [ 791.156579] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 791.156579] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] raise e [ 791.156579] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 791.156579] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] nwinfo = self.network_api.allocate_for_instance( [ 791.156579] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 791.156579] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] created_port_ids = self._update_ports_for_instance( [ 791.156579] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 791.156579] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] with excutils.save_and_reraise_exception(): [ 791.156579] env[62923]: ERROR nova.compute.manager [instance: 
71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.156902] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] self.force_reraise() [ 791.156902] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.156902] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] raise self.value [ 791.156902] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 791.156902] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] updated_port = self._update_port( [ 791.156902] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.156902] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] _ensure_no_port_binding_failure(port) [ 791.156902] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 791.156902] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] raise exception.PortBindingFailed(port_id=port['id']) [ 791.156902] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] nova.exception.PortBindingFailed: Binding failed for port 4d97090d-1208-4f22-87ac-2148a63b69d1, please check neutron logs for more information. [ 791.156902] env[62923]: ERROR nova.compute.manager [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] [ 791.157224] env[62923]: DEBUG nova.compute.utils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Binding failed for port 4d97090d-1208-4f22-87ac-2148a63b69d1, please check neutron logs for more information. 
{{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 791.157863] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.170s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.159674] env[62923]: INFO nova.compute.claims [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 791.162200] env[62923]: DEBUG nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Build of instance 71dd8aff-4500-4c91-8a46-2a398fd03560 was re-scheduled: Binding failed for port 4d97090d-1208-4f22-87ac-2148a63b69d1, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 791.162595] env[62923]: DEBUG nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 791.162811] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "refresh_cache-71dd8aff-4500-4c91-8a46-2a398fd03560" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.162949] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquired lock "refresh_cache-71dd8aff-4500-4c91-8a46-2a398fd03560" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.163112] env[62923]: DEBUG nova.network.neutron [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.420777] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52674207-24fa-73ef-ba25-9a1aaabe7692, 'name': SearchDatastore_Task, 'duration_secs': 0.00899} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.421077] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.421273] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 791.421498] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.421639] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.421812] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 791.422075] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aba1ac62-8429-4d48-824a-aba38d87261e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.429700] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 791.429867] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 791.430527] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1dcf384-383a-4589-bc47-80053d896ebf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.435474] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 791.435474] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ad5e38-f04c-fdc4-adac-bda8676d040b" [ 791.435474] env[62923]: _type = "Task" [ 791.435474] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.443718] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ad5e38-f04c-fdc4-adac-bda8676d040b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.472720] env[62923]: INFO nova.compute.manager [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: c22aa745-0e4a-40fd-903f-edba79cbf88b] Took 1.02 seconds to deallocate network for instance. [ 791.698860] env[62923]: DEBUG nova.network.neutron [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.786517] env[62923]: DEBUG nova.network.neutron [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.945023] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ad5e38-f04c-fdc4-adac-bda8676d040b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.288892] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Releasing lock "refresh_cache-71dd8aff-4500-4c91-8a46-2a398fd03560" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.289141] env[62923]: DEBUG nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 792.289327] env[62923]: DEBUG nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 792.289491] env[62923]: DEBUG nova.network.neutron [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 792.308628] env[62923]: DEBUG nova.network.neutron [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 792.390831] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f8cbbb-1495-46e6-bce7-e29e9ee94bbb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.398250] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863bce2c-6ba2-4f5f-8629-a0500b682fb4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.427649] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0317e399-e8b0-4770-9ddb-e7f4772eb560 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.434885] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f0dc0a-4616-4fdb-8aa2-49f50409328a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.451076] env[62923]: DEBUG nova.compute.provider_tree [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.455259] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ad5e38-f04c-fdc4-adac-bda8676d040b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.499466] env[62923]: INFO nova.scheduler.client.report [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Deleted allocations for instance c22aa745-0e4a-40fd-903f-edba79cbf88b [ 792.811105] env[62923]: DEBUG nova.network.neutron [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.949083] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ad5e38-f04c-fdc4-adac-bda8676d040b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.957392] env[62923]: DEBUG nova.scheduler.client.report [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 793.009865] env[62923]: DEBUG oslo_concurrency.lockutils [None req-93472835-ebe4-42a1-86cd-be81deae0eab tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "c22aa745-0e4a-40fd-903f-edba79cbf88b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 152.235s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.313987] env[62923]: INFO nova.compute.manager [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: 71dd8aff-4500-4c91-8a46-2a398fd03560] Took 1.02 seconds to deallocate network for instance. [ 793.450299] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ad5e38-f04c-fdc4-adac-bda8676d040b, 'name': SearchDatastore_Task, 'duration_secs': 1.602301} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.451128] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea8a006f-a4c9-4064-936e-fd172338a652 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.455923] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 793.455923] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]526fb863-917c-37a6-9953-0d22f60777d6" [ 793.455923] env[62923]: _type = "Task" [ 793.455923] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.464027] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.306s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.464027] env[62923]: DEBUG nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 793.466417] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]526fb863-917c-37a6-9953-0d22f60777d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.466633] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.998s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.466789] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.466929] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62923) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 793.467201] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.044s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.471950] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd39a642-e14e-4126-a416-7013a340e4b2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.478289] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c001babf-f98a-4761-b542-ac513008ec8f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.491890] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282b4cdb-e9f2-4f7c-9f97-1552e48346c4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.498320] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4131d7a2-c248-4af8-b691-c296ccac5773 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.529043] env[62923]: DEBUG nova.compute.manager [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 793.532055] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181484MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62923) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 793.532145] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 793.966860] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]526fb863-917c-37a6-9953-0d22f60777d6, 'name': SearchDatastore_Task, 'duration_secs': 0.008932} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 793.966860] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 793.966860] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 7c98c50a-e7c7-4430-b5c6-dec88a78c397/7c98c50a-e7c7-4430-b5c6-dec88a78c397.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 793.967029] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9efc49c4-5860-4cd5-908d-91879ca800ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 793.974917] env[62923]: DEBUG nova.compute.utils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 793.977369] env[62923]: DEBUG nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 793.977536] env[62923]: DEBUG nova.network.neutron [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 793.979813] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){
[ 793.979813] env[62923]: value = "task-1369874"
[ 793.979813] env[62923]: _type = "Task"
[ 793.979813] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 793.988573] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369874, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 794.049245] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 794.052714] env[62923]: DEBUG nova.policy [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dc59adf8ac884f209dc52564c3af6c50', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0e5ba47886f4a41bef7982ee9f957d4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}}
[ 794.262017] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f47e60-8a6d-4fb5-9653-df7ddaa98979 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 794.270225] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a387b5-ec24-44bf-b091-2a8b681886e4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 794.307942] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b917fe-f1bc-4ca5-8dd6-1d98e7244b73 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 794.321023] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8179fc05-c658-43ff-875d-74d0e3d30e3b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 794.337948] env[62923]: DEBUG nova.compute.provider_tree [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 794.353141] env[62923]: INFO nova.scheduler.client.report [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Deleted allocations for instance 71dd8aff-4500-4c91-8a46-2a398fd03560
[ 794.481019] env[62923]: DEBUG nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 794.493712] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369874, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471802} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 794.494027] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 7c98c50a-e7c7-4430-b5c6-dec88a78c397/7c98c50a-e7c7-4430-b5c6-dec88a78c397.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 794.494248] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 794.494540] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9344d30-db92-464b-b678-183dfaf7d499 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 794.501930] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){
[ 794.501930] env[62923]: value = "task-1369875"
[ 794.501930] env[62923]: _type = "Task"
[ 794.501930] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 794.510038] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369875, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 794.587385] env[62923]: DEBUG nova.network.neutron [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Successfully created port: 281294a6-93ca-4a3d-b526-62a7775e01df {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 794.840178] env[62923]: DEBUG nova.scheduler.client.report [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 794.860911] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dcda9cad-f014-4e95-9cd2-a3bcabfdcf45 tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "71dd8aff-4500-4c91-8a46-2a398fd03560" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 153.436s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 795.013021] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369875, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.145948} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 795.013021] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 795.013021] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1ce473-b456-4be5-9070-8e56cebfddb6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 795.032964] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 7c98c50a-e7c7-4430-b5c6-dec88a78c397/7c98c50a-e7c7-4430-b5c6-dec88a78c397.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 795.033848] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e1c390d-3bfc-4af3-9729-8b0ad6c908e7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 795.053234] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){
[ 795.053234] env[62923]: value = "task-1369876"
[ 795.053234] env[62923]: _type = "Task"
[ 795.053234] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 795.062757] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369876, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 795.346371] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.879s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 795.347008] env[62923]: ERROR nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port baa246a8-c6fe-472f-9a07-faacecad6be2, please check neutron logs for more information.
[ 795.347008] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Traceback (most recent call last):
[ 795.347008] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 795.347008] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] self.driver.spawn(context, instance, image_meta,
[ 795.347008] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 795.347008] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 795.347008] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 795.347008] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] vm_ref = self.build_virtual_machine(instance,
[ 795.347008] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 795.347008] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] vif_infos = vmwarevif.get_vif_info(self._session,
[ 795.347008] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 795.347301] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] for vif in network_info:
[ 795.347301] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 795.347301] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] return self._sync_wrapper(fn, *args, **kwargs)
[ 795.347301] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 795.347301] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] self.wait()
[ 795.347301] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 795.347301] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] self[:] = self._gt.wait()
[ 795.347301] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 795.347301] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] return self._exit_event.wait()
[ 795.347301] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 795.347301] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] current.throw(*self._exc)
[ 795.347301] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 795.347301] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] result = function(*args, **kwargs)
[ 795.347678] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 795.347678] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] return func(*args, **kwargs)
[ 795.347678] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 795.347678] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] raise e
[ 795.347678] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 795.347678] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] nwinfo = self.network_api.allocate_for_instance(
[ 795.347678] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 795.347678] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] created_port_ids = self._update_ports_for_instance(
[ 795.347678] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 795.347678] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] with excutils.save_and_reraise_exception():
[ 795.347678] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 795.347678] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] self.force_reraise()
[ 795.347678] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 795.347992] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] raise self.value
[ 795.347992] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 795.347992] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] updated_port = self._update_port(
[ 795.347992] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 795.347992] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] _ensure_no_port_binding_failure(port)
[ 795.347992] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 795.347992] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] raise exception.PortBindingFailed(port_id=port['id'])
[ 795.347992] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] nova.exception.PortBindingFailed: Binding failed for port baa246a8-c6fe-472f-9a07-faacecad6be2, please check neutron logs for more information.
[ 795.347992] env[62923]: ERROR nova.compute.manager [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95]
[ 795.347992] env[62923]: DEBUG nova.compute.utils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Binding failed for port baa246a8-c6fe-472f-9a07-faacecad6be2, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 795.349093] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.979s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 795.351492] env[62923]: INFO nova.compute.claims [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 795.354810] env[62923]: DEBUG nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Build of instance d65ce1f0-e9de-4fc8-828b-95aec5615f95 was re-scheduled: Binding failed for port baa246a8-c6fe-472f-9a07-faacecad6be2, please check neutron logs for more information. {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
[ 795.354810] env[62923]: DEBUG nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 795.355074] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquiring lock "refresh_cache-d65ce1f0-e9de-4fc8-828b-95aec5615f95" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 795.355887] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Acquired lock "refresh_cache-d65ce1f0-e9de-4fc8-828b-95aec5615f95" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 795.355887] env[62923]: DEBUG nova.network.neutron [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 795.363394] env[62923]: DEBUG nova.compute.manager [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 795.421075] env[62923]: DEBUG nova.compute.manager [req-16e379f9-5d01-4ef8-8622-ec53704e0400 req-182dfc25-39a3-4e75-811c-36a5007fe74d service nova] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Received event network-changed-281294a6-93ca-4a3d-b526-62a7775e01df {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 795.421390] env[62923]: DEBUG nova.compute.manager [req-16e379f9-5d01-4ef8-8622-ec53704e0400 req-182dfc25-39a3-4e75-811c-36a5007fe74d service nova] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Refreshing instance network info cache due to event network-changed-281294a6-93ca-4a3d-b526-62a7775e01df. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 795.421545] env[62923]: DEBUG oslo_concurrency.lockutils [req-16e379f9-5d01-4ef8-8622-ec53704e0400 req-182dfc25-39a3-4e75-811c-36a5007fe74d service nova] Acquiring lock "refresh_cache-db26908c-6aa6-47b8-a3c4-461247e36d85" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 795.422749] env[62923]: DEBUG oslo_concurrency.lockutils [req-16e379f9-5d01-4ef8-8622-ec53704e0400 req-182dfc25-39a3-4e75-811c-36a5007fe74d service nova] Acquired lock "refresh_cache-db26908c-6aa6-47b8-a3c4-461247e36d85" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 795.422749] env[62923]: DEBUG nova.network.neutron [req-16e379f9-5d01-4ef8-8622-ec53704e0400 req-182dfc25-39a3-4e75-811c-36a5007fe74d service nova] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Refreshing network info cache for port 281294a6-93ca-4a3d-b526-62a7775e01df {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 795.493653] env[62923]: DEBUG nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 795.526608] env[62923]: DEBUG nova.virt.hardware [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 795.526855] env[62923]: DEBUG nova.virt.hardware [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 795.527034] env[62923]: DEBUG nova.virt.hardware [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 795.527226] env[62923]: DEBUG nova.virt.hardware [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 795.527368] env[62923]: DEBUG nova.virt.hardware [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 795.527515] env[62923]: DEBUG nova.virt.hardware [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 795.527721] env[62923]: DEBUG nova.virt.hardware [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 795.527871] env[62923]: DEBUG nova.virt.hardware [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 795.528042] env[62923]: DEBUG nova.virt.hardware [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 795.528192] env[62923]: DEBUG nova.virt.hardware [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 795.528504] env[62923]: DEBUG nova.virt.hardware [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 795.532022] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c73ac6-8677-4662-bf70-6b4ddfdec43d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 795.538181] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04ba154-8fc4-4ce1-b0ba-0d3675fae2f9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 795.562435] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369876, 'name': ReconfigVM_Task, 'duration_secs': 0.483441} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 795.562757] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 7c98c50a-e7c7-4430-b5c6-dec88a78c397/7c98c50a-e7c7-4430-b5c6-dec88a78c397.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 795.563354] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8a67957-721b-42a3-a3c9-2e7ba4385502 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 795.570518] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){
[ 795.570518] env[62923]: value = "task-1369877"
[ 795.570518] env[62923]: _type = "Task"
[ 795.570518] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 795.579015] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369877, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 795.658366] env[62923]: ERROR nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 281294a6-93ca-4a3d-b526-62a7775e01df, please check neutron logs for more information.
[ 795.658366] env[62923]: ERROR nova.compute.manager Traceback (most recent call last):
[ 795.658366] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 795.658366] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 795.658366] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 795.658366] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 795.658366] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 795.658366] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 795.658366] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 795.658366] env[62923]: ERROR nova.compute.manager self.force_reraise()
[ 795.658366] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 795.658366] env[62923]: ERROR nova.compute.manager raise self.value
[ 795.658366] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 795.658366] env[62923]: ERROR nova.compute.manager updated_port = self._update_port(
[ 795.658366] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 795.658366] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 795.658730] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 795.658730] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 795.658730] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 281294a6-93ca-4a3d-b526-62a7775e01df, please check neutron logs for more information.
[ 795.658730] env[62923]: ERROR nova.compute.manager
[ 795.658730] env[62923]: Traceback (most recent call last):
[ 795.658730] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 795.658730] env[62923]: listener.cb(fileno)
[ 795.658730] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 795.658730] env[62923]: result = function(*args, **kwargs)
[ 795.658730] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 795.658730] env[62923]: return func(*args, **kwargs)
[ 795.658730] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 795.658730] env[62923]: raise e
[ 795.658730] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 795.658730] env[62923]: nwinfo = self.network_api.allocate_for_instance(
[ 795.658730] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 795.658730] env[62923]: created_port_ids = self._update_ports_for_instance(
[ 795.658730] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 795.658730] env[62923]: with excutils.save_and_reraise_exception():
[ 795.658730] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 795.658730] env[62923]: self.force_reraise()
[ 795.658730] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 795.658730] env[62923]: raise self.value
[ 795.658730] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 795.658730] env[62923]: updated_port = self._update_port(
[ 795.658730] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 795.658730] env[62923]: _ensure_no_port_binding_failure(port)
[ 795.658730] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 795.658730] env[62923]: raise exception.PortBindingFailed(port_id=port['id'])
[ 795.659332] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 281294a6-93ca-4a3d-b526-62a7775e01df, please check neutron logs for more information.
[ 795.659332] env[62923]: Removing descriptor: 21
[ 795.659332] env[62923]: ERROR nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 281294a6-93ca-4a3d-b526-62a7775e01df, please check neutron logs for more information.
[ 795.659332] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Traceback (most recent call last):
[ 795.659332] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 795.659332] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] yield resources
[ 795.659332] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 795.659332] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] self.driver.spawn(context, instance, image_meta,
[ 795.659332] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 795.659332] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 795.659332] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 795.659332] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] vm_ref = self.build_virtual_machine(instance,
[ 795.659629] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 795.659629] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] vif_infos = vmwarevif.get_vif_info(self._session,
[ 795.659629] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 795.659629] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] for vif in network_info:
[ 795.659629] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 795.659629] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] return self._sync_wrapper(fn, *args, **kwargs)
[ 795.659629] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 795.659629] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] self.wait()
[ 795.659629] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 795.659629] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] self[:] = self._gt.wait()
[ 795.659629] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 795.659629] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] return self._exit_event.wait()
[ 795.659629] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 795.659932] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] result = hub.switch()
[ 795.659932] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 795.659932] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] return self.greenlet.switch()
[ 795.659932] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 795.659932] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] result = function(*args, **kwargs)
[ 795.659932] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 795.659932] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] return func(*args, **kwargs)
[ 795.659932] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 795.659932] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] raise e
[ 795.659932] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 795.659932] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] nwinfo = self.network_api.allocate_for_instance(
[ 795.659932] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 795.659932] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] created_port_ids = self._update_ports_for_instance(
[ 795.660249] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 795.660249] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] with excutils.save_and_reraise_exception():
[ 795.660249] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 795.660249] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] self.force_reraise()
[ 795.660249] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 795.660249] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] raise self.value
[ 795.660249] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 795.660249] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] updated_port = self._update_port(
[ 795.660249] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 795.660249] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] _ensure_no_port_binding_failure(port)
[ 795.660249] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 795.660249] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] raise exception.PortBindingFailed(port_id=port['id'])
[ 795.660744] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] nova.exception.PortBindingFailed: Binding failed for port 281294a6-93ca-4a3d-b526-62a7775e01df, please check neutron logs for more information.
[ 795.660744] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85]
[ 795.660744] env[62923]: INFO nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Terminating instance
[ 795.661802] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Acquiring lock "refresh_cache-db26908c-6aa6-47b8-a3c4-461247e36d85" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 795.875134] env[62923]: DEBUG nova.network.neutron [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 795.883483] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 795.947790] env[62923]: DEBUG nova.network.neutron [req-16e379f9-5d01-4ef8-8622-ec53704e0400 req-182dfc25-39a3-4e75-811c-36a5007fe74d service nova] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 795.982363] env[62923]: DEBUG nova.network.neutron [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 796.008983] env[62923]: DEBUG nova.network.neutron [req-16e379f9-5d01-4ef8-8622-ec53704e0400 req-182dfc25-39a3-4e75-811c-36a5007fe74d service nova] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 796.081039] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369877, 'name': Rename_Task, 'duration_secs': 0.129105} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 796.081326] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 796.081570] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99456beb-ae8b-40f8-bddc-83f75aad91b4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 796.087825] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){
[ 796.087825] env[62923]: value = "task-1369878"
[ 796.087825] env[62923]: _type = "Task"
[ 796.087825] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 796.094961] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369878, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 796.484996] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Releasing lock "refresh_cache-d65ce1f0-e9de-4fc8-828b-95aec5615f95" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 796.485289] env[62923]: DEBUG nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 796.485506] env[62923]: DEBUG nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 796.485804] env[62923]: DEBUG nova.network.neutron [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 796.500860] env[62923]: DEBUG nova.network.neutron [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 796.511053] env[62923]: DEBUG oslo_concurrency.lockutils [req-16e379f9-5d01-4ef8-8622-ec53704e0400 req-182dfc25-39a3-4e75-811c-36a5007fe74d service nova] Releasing lock "refresh_cache-db26908c-6aa6-47b8-a3c4-461247e36d85" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 796.511410] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Acquired lock "refresh_cache-db26908c-6aa6-47b8-a3c4-461247e36d85" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 796.511582] env[62923]: DEBUG nova.network.neutron [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 796.592879] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f62a373-fc78-455a-b00b-55d2fe5e054d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 796.602769] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd48ccb1-d36b-48cb-91dd-9b64eee256d5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 796.605698] env[62923]: DEBUG oslo_vmware.api [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369878, 'name': PowerOnVM_Task, 'duration_secs': 0.417407} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 796.605941] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 796.606145] env[62923]: INFO nova.compute.manager [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Took 7.31 seconds to spawn the instance on the hypervisor.
[ 796.606317] env[62923]: DEBUG nova.compute.manager [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 796.607314] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c6f500-4e54-466a-8e11-216d60fabef4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 796.633048] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41745bae-0f94-4597-ab8a-57d4cefc91e5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 796.643215] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66f33a7-503a-4d13-95e4-c85c6ed07de3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 796.658717] env[62923]: DEBUG nova.compute.provider_tree [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 797.004856] env[62923]: DEBUG nova.network.neutron [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 797.030843] env[62923]: DEBUG nova.network.neutron [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 797.096375] env[62923]: DEBUG nova.network.neutron [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 797.148834] env[62923]: INFO nova.compute.manager [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Took 26.33 seconds to build instance.
[ 797.161253] env[62923]: DEBUG nova.scheduler.client.report [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 797.445521] env[62923]: DEBUG nova.compute.manager [req-e0d7e3e2-cd5d-4c0a-bd1e-25e350777eb5 req-15b982df-26b9-47a1-b80e-427945a7f330 service nova] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Received event network-vif-deleted-281294a6-93ca-4a3d-b526-62a7775e01df {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 797.508036] env[62923]: INFO nova.compute.manager [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] [instance: d65ce1f0-e9de-4fc8-828b-95aec5615f95] Took 1.02 seconds to deallocate network for instance.
[ 797.599958] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Releasing lock "refresh_cache-db26908c-6aa6-47b8-a3c4-461247e36d85" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 797.600504] env[62923]: DEBUG nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 797.600611] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 797.600913] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0f9bcb1b-c8a9-4f0c-90ff-b16603a989d9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 797.610811] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02784e9-5912-4d30-b124-7ad7946f05f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 797.632549] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance db26908c-6aa6-47b8-a3c4-461247e36d85 could not be found.
[ 797.632780] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 797.632956] env[62923]: INFO nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Took 0.03 seconds to destroy the instance on the hypervisor.
[ 797.633213] env[62923]: DEBUG oslo.service.loopingcall [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 797.633426] env[62923]: DEBUG nova.compute.manager [-] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 797.633516] env[62923]: DEBUG nova.network.neutron [-] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 797.647449] env[62923]: DEBUG nova.network.neutron [-] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 797.649284] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6684c4a3-95ae-48e4-82cb-366b3e1e836e tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Lock "7c98c50a-e7c7-4430-b5c6-dec88a78c397" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 121.816s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 797.666577] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.316s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 797.666577] env[62923]: DEBUG nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 797.668701] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.660s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 797.670281] env[62923]: INFO nova.compute.claims [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 798.150116] env[62923]: DEBUG nova.network.neutron [-] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 798.153627] env[62923]: DEBUG nova.compute.manager [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 798.170188] env[62923]: DEBUG nova.compute.utils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 798.171641] env[62923]: DEBUG nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 798.172342] env[62923]: DEBUG nova.network.neutron [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 798.220035] env[62923]: DEBUG nova.policy [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68e62d519b19448c8cac7f1b2e55a087', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3b09245b63144e9bbcb2262aef33a21', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}}
[ 798.229327] env[62923]: INFO nova.compute.manager [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Rebuilding instance
[ 798.271814] env[62923]: DEBUG nova.compute.manager [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 798.272714] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991e9527-25a1-475b-a2ef-d11aa605e95b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 798.539195] env[62923]: INFO nova.scheduler.client.report [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Deleted allocations for instance d65ce1f0-e9de-4fc8-828b-95aec5615f95
[ 798.662019] env[62923]: INFO nova.compute.manager [-] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Took 1.02 seconds to deallocate network for instance.
[ 798.665347] env[62923]: DEBUG nova.compute.claims [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 798.665562] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.675096] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.675607] env[62923]: DEBUG nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 798.728658] env[62923]: DEBUG nova.network.neutron [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Successfully created port: 830121e2-d6a1-4b98-b104-48e570c31125 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 798.785458] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 798.785884] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf835bc0-2d2b-4eaf-b64b-ca654f14c648 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.800250] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 798.800250] env[62923]: value = "task-1369879" [ 798.800250] env[62923]: _type = "Task" [ 798.800250] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.813754] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369879, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.953315] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2ee7ca-45f9-4bf7-a097-49e268c0ddcb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.961021] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3868550-5621-4b9c-9e1d-d4f7977cd022 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.990782] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5ee61b-ada7-444e-a1a8-70dea4974eb6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.998331] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e51195e-3193-488e-98a0-6384904a2d49 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.011589] env[62923]: DEBUG nova.compute.provider_tree [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.049935] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5b362386-6fe9-4ca2-9487-d899d8e68a6d tempest-ListServerFiltersTestJSON-718682557 tempest-ListServerFiltersTestJSON-718682557-project-member] Lock "d65ce1f0-e9de-4fc8-828b-95aec5615f95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 157.137s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.311376] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369879, 'name': PowerOffVM_Task, 'duration_secs': 0.112963} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.315018] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 799.315018] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 799.315018] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b303a6-b2ad-47ee-8bbe-269fd9841d05 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.319937] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 799.320313] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bc4dffa-7a06-456a-8054-8daa793bec7f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.351152] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 799.351152] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 799.351152] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Deleting the datastore file [datastore1] 7c98c50a-e7c7-4430-b5c6-dec88a78c397 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 799.351152] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce912d40-2cbc-4895-a8aa-304991897b0d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.356336] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 799.356336] env[62923]: value = "task-1369881" [ 799.356336] env[62923]: _type = "Task" [ 799.356336] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.364411] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369881, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.518115] env[62923]: DEBUG nova.scheduler.client.report [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 799.552647] env[62923]: DEBUG nova.compute.manager [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 799.591230] env[62923]: DEBUG nova.compute.manager [req-85f633fb-bfae-4089-9378-f2248a2b01ab req-3b05af01-62a2-4f74-91cb-bf656cdc76e4 service nova] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Received event network-changed-830121e2-d6a1-4b98-b104-48e570c31125 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 799.591442] env[62923]: DEBUG nova.compute.manager [req-85f633fb-bfae-4089-9378-f2248a2b01ab req-3b05af01-62a2-4f74-91cb-bf656cdc76e4 service nova] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Refreshing instance network info cache due to event network-changed-830121e2-d6a1-4b98-b104-48e570c31125. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 799.591630] env[62923]: DEBUG oslo_concurrency.lockutils [req-85f633fb-bfae-4089-9378-f2248a2b01ab req-3b05af01-62a2-4f74-91cb-bf656cdc76e4 service nova] Acquiring lock "refresh_cache-83ead303-c5b9-4600-935b-fa1a77689dcf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.593163] env[62923]: DEBUG oslo_concurrency.lockutils [req-85f633fb-bfae-4089-9378-f2248a2b01ab req-3b05af01-62a2-4f74-91cb-bf656cdc76e4 service nova] Acquired lock "refresh_cache-83ead303-c5b9-4600-935b-fa1a77689dcf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.593163] env[62923]: DEBUG nova.network.neutron [req-85f633fb-bfae-4089-9378-f2248a2b01ab req-3b05af01-62a2-4f74-91cb-bf656cdc76e4 service nova] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Refreshing network info cache for port 830121e2-d6a1-4b98-b104-48e570c31125 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 799.687099] env[62923]: DEBUG nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 799.715092] env[62923]: DEBUG nova.virt.hardware [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 799.715092] env[62923]: DEBUG nova.virt.hardware [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 799.715092] env[62923]: DEBUG nova.virt.hardware [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 799.716096] env[62923]: DEBUG nova.virt.hardware [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 799.716096] env[62923]: DEBUG nova.virt.hardware [None 
req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 799.716096] env[62923]: DEBUG nova.virt.hardware [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 799.716096] env[62923]: DEBUG nova.virt.hardware [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 799.716096] env[62923]: DEBUG nova.virt.hardware [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 799.716299] env[62923]: DEBUG nova.virt.hardware [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 799.716595] env[62923]: DEBUG nova.virt.hardware [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 799.716880] env[62923]: DEBUG nova.virt.hardware [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 799.717916] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e8f092-6789-4988-9bee-1174165ec276 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.727126] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa0066a-2e67-415a-bcce-7df92858eebe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.812995] env[62923]: ERROR nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 830121e2-d6a1-4b98-b104-48e570c31125, please check neutron logs for more information. 
[ 799.812995] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 799.812995] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 799.812995] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 799.812995] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 799.812995] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 799.812995] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 799.812995] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 799.812995] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 799.812995] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 799.812995] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 799.812995] env[62923]: ERROR nova.compute.manager raise self.value [ 799.812995] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 799.812995] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 799.812995] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 799.812995] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 799.813459] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 799.813459] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 799.813459] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 830121e2-d6a1-4b98-b104-48e570c31125, please check neutron logs for more information. 
[ 799.813459] env[62923]: ERROR nova.compute.manager [ 799.813459] env[62923]: Traceback (most recent call last): [ 799.813459] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 799.813459] env[62923]: listener.cb(fileno) [ 799.813459] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 799.813459] env[62923]: result = function(*args, **kwargs) [ 799.813459] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 799.813459] env[62923]: return func(*args, **kwargs) [ 799.813459] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 799.813459] env[62923]: raise e [ 799.813459] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 799.813459] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 799.813459] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 799.813459] env[62923]: created_port_ids = self._update_ports_for_instance( [ 799.813459] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 799.813459] env[62923]: with excutils.save_and_reraise_exception(): [ 799.813459] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 799.813459] env[62923]: self.force_reraise() [ 799.813459] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 799.813459] env[62923]: raise self.value [ 799.813459] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 799.813459] env[62923]: updated_port = self._update_port( [ 799.813459] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 799.813459] env[62923]: _ensure_no_port_binding_failure(port) [ 799.813459] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 799.813459] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 799.814180] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 830121e2-d6a1-4b98-b104-48e570c31125, please check neutron logs for more information. [ 799.814180] env[62923]: Removing descriptor: 18 [ 799.814180] env[62923]: ERROR nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 830121e2-d6a1-4b98-b104-48e570c31125, please check neutron logs for more information. 
[ 799.814180] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Traceback (most recent call last): [ 799.814180] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 799.814180] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] yield resources [ 799.814180] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 799.814180] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] self.driver.spawn(context, instance, image_meta, [ 799.814180] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 799.814180] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 799.814180] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 799.814180] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] vm_ref = self.build_virtual_machine(instance, [ 799.814488] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 799.814488] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] vif_infos = vmwarevif.get_vif_info(self._session, [ 799.814488] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 799.814488] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] for vif in network_info: [ 799.814488] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 799.814488] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] return self._sync_wrapper(fn, *args, **kwargs) [ 799.814488] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 799.814488] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] self.wait() [ 799.814488] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 799.814488] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] self[:] = self._gt.wait() [ 799.814488] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 799.814488] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] return self._exit_event.wait() [ 799.814488] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 799.814818] env[62923]: ERROR 
nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] result = hub.switch() [ 799.814818] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 799.814818] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] return self.greenlet.switch() [ 799.814818] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 799.814818] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] result = function(*args, **kwargs) [ 799.814818] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 799.814818] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] return func(*args, **kwargs) [ 799.814818] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 799.814818] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] raise e [ 799.814818] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 799.814818] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] nwinfo = self.network_api.allocate_for_instance( [ 799.814818] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 799.814818] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] created_port_ids = self._update_ports_for_instance( [ 799.815181] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 799.815181] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] with excutils.save_and_reraise_exception(): [ 799.815181] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 799.815181] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] self.force_reraise() [ 799.815181] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 799.815181] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] raise self.value [ 799.815181] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 799.815181] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] updated_port = self._update_port( [ 799.815181] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 799.815181] 
env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] _ensure_no_port_binding_failure(port) [ 799.815181] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 799.815181] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] raise exception.PortBindingFailed(port_id=port['id']) [ 799.815480] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] nova.exception.PortBindingFailed: Binding failed for port 830121e2-d6a1-4b98-b104-48e570c31125, please check neutron logs for more information. [ 799.815480] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] [ 799.815480] env[62923]: INFO nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Terminating instance [ 799.816447] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "refresh_cache-83ead303-c5b9-4600-935b-fa1a77689dcf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.867162] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369881, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098809} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.867417] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 799.867590] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 799.867756] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 800.022164] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.353s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.022677] env[62923]: DEBUG nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 
c2e48555-68b5-4ed0-8ad6-a87833538df8] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 800.025430] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.230s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.026846] env[62923]: INFO nova.compute.claims [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.079213] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.114151] env[62923]: DEBUG nova.network.neutron [req-85f633fb-bfae-4089-9378-f2248a2b01ab req-3b05af01-62a2-4f74-91cb-bf656cdc76e4 service nova] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.219452] env[62923]: DEBUG nova.network.neutron [req-85f633fb-bfae-4089-9378-f2248a2b01ab req-3b05af01-62a2-4f74-91cb-bf656cdc76e4 service nova] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.531351] env[62923]: DEBUG nova.compute.utils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 800.536721] env[62923]: DEBUG nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 800.537576] env[62923]: DEBUG nova.network.neutron [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 800.577907] env[62923]: DEBUG nova.policy [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c37debff078b4389813658cbad297e65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0db41047d1004a1d9ca7f663178058da', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 800.724593] env[62923]: DEBUG oslo_concurrency.lockutils [req-85f633fb-bfae-4089-9378-f2248a2b01ab req-3b05af01-62a2-4f74-91cb-bf656cdc76e4 service nova] Releasing lock "refresh_cache-83ead303-c5b9-4600-935b-fa1a77689dcf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.724998] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "refresh_cache-83ead303-c5b9-4600-935b-fa1a77689dcf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.725189] env[62923]: DEBUG nova.network.neutron [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 800.907375] env[62923]: DEBUG nova.virt.hardware [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 800.907375] env[62923]: DEBUG nova.virt.hardware [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Flavor limits 0:0:0 
{{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 800.907375] env[62923]: DEBUG nova.virt.hardware [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.907375] env[62923]: DEBUG nova.virt.hardware [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 800.907582] env[62923]: DEBUG nova.virt.hardware [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.907582] env[62923]: DEBUG nova.virt.hardware [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 800.908168] env[62923]: DEBUG nova.virt.hardware [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 800.911020] env[62923]: DEBUG nova.virt.hardware [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 800.911020] env[62923]: DEBUG nova.virt.hardware [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 800.911020] env[62923]: DEBUG nova.virt.hardware [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 800.911020] env[62923]: DEBUG nova.virt.hardware [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 800.911020] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec4f2494-aae5-4bbd-aa92-997d6b42f2c3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.919340] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fbb8ab-9056-4c0c-9c8a-492b90e20272 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.937054] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Instance VIF info [] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 800.940784] env[62923]: DEBUG oslo.service.loopingcall [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 800.941190] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 800.941531] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e45bd6a4-c8ed-409d-b42c-21192916b0e2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.959134] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 800.959134] env[62923]: value = "task-1369882" [ 800.959134] env[62923]: _type = "Task" [ 800.959134] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.966895] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369882, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.977739] env[62923]: DEBUG nova.network.neutron [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Successfully created port: 40143bd4-2a73-46ca-bed1-d909b7cf967a {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 801.041370] env[62923]: DEBUG nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 801.259416] env[62923]: DEBUG nova.network.neutron [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 801.326040] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2d3a30-2ccf-490f-ab1b-6e2991a61ca2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.333271] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3c6ac9-7108-4ef9-819c-b57275f374cb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.370179] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2739959e-ca7f-455f-9727-c2d5376bef7f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.377453] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c30a84-7e36-495a-b3f9-fb0f45071ea7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.390593] env[62923]: DEBUG nova.compute.provider_tree [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.428739] env[62923]: DEBUG nova.network.neutron [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.469247] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369882, 'name': CreateVM_Task, 'duration_secs': 0.28677} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.469377] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 801.469888] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.470059] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.470370] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 801.470899] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8922f49-2975-41b0-ae4c-17a77577a810 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.475358] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 801.475358] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5210de70-6ec3-aa7a-e56e-0cfc1393355b" [ 801.475358] env[62923]: _type = "Task" [ 801.475358] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.483987] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5210de70-6ec3-aa7a-e56e-0cfc1393355b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.617862] env[62923]: DEBUG nova.compute.manager [req-86cf2ea8-d779-4ef3-bbc9-b3bc8a6802bd req-47b03d0c-4569-4aeb-bbb5-4784f930677c service nova] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Received event network-vif-deleted-830121e2-d6a1-4b98-b104-48e570c31125 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 801.893986] env[62923]: DEBUG nova.scheduler.client.report [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 801.933225] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "refresh_cache-83ead303-c5b9-4600-935b-fa1a77689dcf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.933225] env[62923]: DEBUG nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 801.933225] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 801.933225] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db04eddd-275a-436e-b3d9-d4c4ba3ba69f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.942399] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a08286-4a90-4fc6-a802-c82d7493925a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.967091] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 83ead303-c5b9-4600-935b-fa1a77689dcf could not be found. 
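[editor's note] The WARNING above shows the vmwareapi driver tolerating a missing backend VM during destroy: SearchIndex.FindAllByUuid finds nothing, vmops logs the InstanceNotFound, and teardown continues as if the destroy succeeded. A minimal, hypothetical sketch of that pattern (the helper and exception below are stand-ins, not the real vm_util/nova.exception code):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger("vmops-sketch")

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def find_vm_by_uuid(uuid):
        # Stub for a SearchIndex.FindAllByUuid lookup; here the VM is
        # already gone, as in the log above.
        raise InstanceNotFound(uuid)

    def destroy_instance(uuid):
        try:
            vm_ref = find_vm_by_uuid(uuid)
            # ... unregister/delete the VM via vm_ref here ...
        except InstanceNotFound:
            # Treat "already gone" as success so the higher-level cleanup
            # (network deallocation, claim abort) still runs.
            LOG.warning("Instance does not exist on backend: %s", uuid)
        LOG.debug("Instance destroyed: %s", uuid)

    destroy_instance("83ead303-c5b9-4600-935b-fa1a77689dcf")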
[ 801.967313] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 801.967489] env[62923]: INFO nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Took 0.04 seconds to destroy the instance on the hypervisor. [ 801.967723] env[62923]: DEBUG oslo.service.loopingcall [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 801.967945] env[62923]: DEBUG nova.compute.manager [-] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 801.968042] env[62923]: DEBUG nova.network.neutron [-] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 801.984970] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5210de70-6ec3-aa7a-e56e-0cfc1393355b, 'name': SearchDatastore_Task, 'duration_secs': 0.009507} completed successfully.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.985296] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.985562] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 801.985808] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.985953] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.986150] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 801.986398] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-935f687e-cf1e-4767-90cc-9763030f0750 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.995115] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 801.995299] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 801.996046] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-418e9614-cca1-4acb-8bbb-3de73ccd97f5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.001165] env[62923]: DEBUG nova.network.neutron [-] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.004744] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 802.004744] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a941b9-1e8c-adca-c448-554d66e96d5d" [ 802.004744] env[62923]: _type = "Task" [ 802.004744] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.019119] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a941b9-1e8c-adca-c448-554d66e96d5d, 'name': SearchDatastore_Task, 'duration_secs': 0.007865} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.020490] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b87779e-fe6f-4ac7-9981-1154643e0902 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.026419] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 802.026419] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522f69ed-61ce-37de-a149-8a2e438eabc9" [ 802.026419] env[62923]: _type = "Task" [ 802.026419] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.039277] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522f69ed-61ce-37de-a149-8a2e438eabc9, 'name': SearchDatastore_Task, 'duration_secs': 0.008388} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.039277] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.039434] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 7c98c50a-e7c7-4430-b5c6-dec88a78c397/7c98c50a-e7c7-4430-b5c6-dec88a78c397.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 802.039701] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5424d49a-10a2-4ba5-a33b-2757ed41e443 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.047415] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 802.047415] env[62923]: value = "task-1369883" [ 802.047415] env[62923]: _type = "Task" [ 802.047415] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.051604] env[62923]: DEBUG nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 802.059673] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369883, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.088682] env[62923]: DEBUG nova.virt.hardware [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 802.089016] env[62923]: DEBUG nova.virt.hardware [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 802.089214] env[62923]: DEBUG nova.virt.hardware [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 802.089421] env[62923]: DEBUG nova.virt.hardware [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 802.089604] env[62923]: DEBUG nova.virt.hardware [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 802.089781] env[62923]: DEBUG nova.virt.hardware [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 802.090015] env[62923]: DEBUG nova.virt.hardware [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 802.090345] env[62923]: DEBUG nova.virt.hardware [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 802.091116] env[62923]: 
DEBUG nova.virt.hardware [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 802.091116] env[62923]: DEBUG nova.virt.hardware [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 802.091116] env[62923]: DEBUG nova.virt.hardware [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 802.091884] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d808c6-fb1d-4d92-bcdc-cb24e41c1d55 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.100889] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d42fb73-108d-4083-abf2-3928e0d88308 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.117210] env[62923]: ERROR nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 40143bd4-2a73-46ca-bed1-d909b7cf967a, please check neutron logs for more information. 
[ 802.117210] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 802.117210] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 802.117210] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 802.117210] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 802.117210] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 802.117210] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 802.117210] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 802.117210] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 802.117210] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 802.117210] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 802.117210] env[62923]: ERROR nova.compute.manager raise self.value [ 802.117210] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 802.117210] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 802.117210] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 802.117210] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 802.117629] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 802.117629] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 802.117629] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 40143bd4-2a73-46ca-bed1-d909b7cf967a, please check neutron logs for more information. 
[ 802.117629] env[62923]: ERROR nova.compute.manager [ 802.117629] env[62923]: Traceback (most recent call last): [ 802.117629] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 802.117629] env[62923]: listener.cb(fileno) [ 802.117629] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 802.117629] env[62923]: result = function(*args, **kwargs) [ 802.117629] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 802.117629] env[62923]: return func(*args, **kwargs) [ 802.117629] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 802.117629] env[62923]: raise e [ 802.117629] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 802.117629] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 802.117629] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 802.117629] env[62923]: created_port_ids = self._update_ports_for_instance( [ 802.117629] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 802.117629] env[62923]: with excutils.save_and_reraise_exception(): [ 802.117629] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 802.117629] env[62923]: self.force_reraise() [ 802.117629] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 802.117629] env[62923]: raise self.value [ 802.117629] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 802.117629] env[62923]: updated_port = self._update_port( [ 802.117629] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 802.117629] env[62923]: _ensure_no_port_binding_failure(port) [ 802.117629] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 802.117629] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 802.118220] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 40143bd4-2a73-46ca-bed1-d909b7cf967a, please check neutron logs for more information. [ 802.118220] env[62923]: Removing descriptor: 18 [ 802.118268] env[62923]: ERROR nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 40143bd4-2a73-46ca-bed1-d909b7cf967a, please check neutron logs for more information. 
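[editor's note] Both copies of the traceback above pass through oslo_utils.excutils.save_and_reraise_exception, which is why the force_reraise / raise self.value frames appear: the context manager lets cleanup run inside the except block and then re-raises the original exception. A minimal usage sketch of that idiom (assumes oslo.utils is installed; bind/rollback are illustrative, not Nova's port-update code):

    from oslo_utils import excutils

    def bind(port):
        raise RuntimeError("binding failed for %s" % port)

    def rollback(port):
        print("rolling back", port)

    def update_ports(ports):
        for port in ports:
            try:
                bind(port)
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup runs first; the original exception is
                    # re-raised when the with-block exits (the
                    # force_reraise frame in the traceback).
                    rollback(port)

    try:
        update_ports(["40143bd4"])
    except RuntimeError as exc:
        print("re-raised:", exc)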
[ 802.118268] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Traceback (most recent call last): [ 802.118268] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 802.118268] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] yield resources [ 802.118268] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 802.118268] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] self.driver.spawn(context, instance, image_meta, [ 802.118268] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 802.118268] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 802.118268] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 802.118268] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] vm_ref = self.build_virtual_machine(instance, [ 802.118268] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 802.118537] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] vif_infos = vmwarevif.get_vif_info(self._session, [ 802.118537] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 802.118537] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] for vif in network_info: [ 802.118537] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 802.118537] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] return self._sync_wrapper(fn, *args, **kwargs) [ 802.118537] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 802.118537] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] self.wait() [ 802.118537] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 802.118537] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] self[:] = self._gt.wait() [ 802.118537] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 802.118537] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] return self._exit_event.wait() [ 802.118537] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 802.118537] env[62923]: ERROR 
nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] result = hub.switch() [ 802.118797] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 802.118797] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] return self.greenlet.switch() [ 802.118797] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 802.118797] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] result = function(*args, **kwargs) [ 802.118797] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 802.118797] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] return func(*args, **kwargs) [ 802.118797] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 802.118797] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] raise e [ 802.118797] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 802.118797] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] nwinfo = self.network_api.allocate_for_instance( [ 802.118797] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 802.118797] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] created_port_ids = self._update_ports_for_instance( [ 802.118797] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 802.119082] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] with excutils.save_and_reraise_exception(): [ 802.119082] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 802.119082] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] self.force_reraise() [ 802.119082] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 802.119082] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] raise self.value [ 802.119082] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 802.119082] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] updated_port = self._update_port( [ 802.119082] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 802.119082] 
env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] _ensure_no_port_binding_failure(port) [ 802.119082] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 802.119082] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] raise exception.PortBindingFailed(port_id=port['id']) [ 802.119082] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] nova.exception.PortBindingFailed: Binding failed for port 40143bd4-2a73-46ca-bed1-d909b7cf967a, please check neutron logs for more information. [ 802.119082] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] [ 802.119357] env[62923]: INFO nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Terminating instance [ 802.120944] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "refresh_cache-c2e48555-68b5-4ed0-8ad6-a87833538df8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.120944] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "refresh_cache-c2e48555-68b5-4ed0-8ad6-a87833538df8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.121133] env[62923]: DEBUG nova.network.neutron [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 802.399343] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.374s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.399902] env[62923]: DEBUG nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 802.402837] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.772s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.405787] env[62923]: INFO nova.compute.claims [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 802.505843] env[62923]: DEBUG nova.network.neutron [-] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.557487] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369883, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461796} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.557744] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 7c98c50a-e7c7-4430-b5c6-dec88a78c397/7c98c50a-e7c7-4430-b5c6-dec88a78c397.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 802.557958] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 802.558216] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-577183e5-a45a-4441-9d68-058edcdc65f8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.566467] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 802.566467] env[62923]: value = "task-1369884" [ 802.566467] env[62923]: _type = "Task" [ 802.566467] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.574532] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369884, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.640548] env[62923]: DEBUG nova.network.neutron [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.730612] env[62923]: DEBUG nova.network.neutron [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.912323] env[62923]: DEBUG nova.compute.utils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 802.920142] env[62923]: DEBUG nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 802.920142] env[62923]: DEBUG nova.network.neutron [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 803.008014] env[62923]: DEBUG nova.policy [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7f02009c6b6b4c60b3060b4aa0c5df0d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a848858428540f390e398928dbba727', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 803.010273] env[62923]: INFO nova.compute.manager [-] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Took 1.04 seconds to deallocate network for instance. 
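[editor's note] Per the frames in the tracebacks above, the PortBindingFailed for port 40143bd4-2a73-46ca-bed1-d909b7cf967a originates in nova/network/neutron.py:294, where _ensure_no_port_binding_failure turns a Neutron port whose binding came back as failed into a Nova exception. A sketch reconstructed from the traceback (binding:vif_type is Neutron's standard port attribute; treat the details as approximate, not the exact Nova source):

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron signals a failed binding by setting binding:vif_type
        # to "binding_failed" on the port; surface that as a hard error.
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port_id=port["id"])

    try:
        ensure_no_port_binding_failure(
            {"id": "40143bd4-2a73-46ca-bed1-d909b7cf967a",
             "binding:vif_type": "binding_failed"})
    except PortBindingFailed as exc:
        print(exc)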
[ 803.014320] env[62923]: DEBUG nova.compute.claims [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 803.014320] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.077348] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369884, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076125} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.077789] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 803.078862] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edce1914-ded8-4774-8d80-a882733e7a49 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.108013] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 7c98c50a-e7c7-4430-b5c6-dec88a78c397/7c98c50a-e7c7-4430-b5c6-dec88a78c397.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 803.108013] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c33b66d-45ed-4b38-8a0d-4fb3ff67605a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.129123] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 803.129123] env[62923]: value = "task-1369885" [ 803.129123] env[62923]: _type = "Task" [ 803.129123] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.135848] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369885, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.231685] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "refresh_cache-c2e48555-68b5-4ed0-8ad6-a87833538df8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.232150] env[62923]: DEBUG nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 803.232346] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 803.232685] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0c28a1a2-3d1d-4f63-bcf0-cf4215c42d87 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.241246] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7502fe-ce63-445f-841b-3216c79997d5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.263877] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c2e48555-68b5-4ed0-8ad6-a87833538df8 could not be found. [ 803.263877] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 803.263877] env[62923]: INFO nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Took 0.03 seconds to destroy the instance on the hypervisor. [ 803.264087] env[62923]: DEBUG oslo.service.loopingcall [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 803.264255] env[62923]: DEBUG nova.compute.manager [-] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 803.264344] env[62923]: DEBUG nova.network.neutron [-] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 803.293774] env[62923]: DEBUG nova.network.neutron [-] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.418408] env[62923]: DEBUG nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 803.513922] env[62923]: DEBUG nova.network.neutron [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Successfully created port: 85a59bf2-c2a3-4e88-aa11-8f784b39fada {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.636117] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369885, 'name': ReconfigVM_Task, 'duration_secs': 0.28193} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.636395] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 7c98c50a-e7c7-4430-b5c6-dec88a78c397/7c98c50a-e7c7-4430-b5c6-dec88a78c397.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 803.637120] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0998dc55-9861-4a00-a7e9-b03308b44003 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.643166] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 803.643166] env[62923]: value = "task-1369886" [ 803.643166] env[62923]: _type = "Task" [ 803.643166] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.653274] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369886, 'name': Rename_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.744815] env[62923]: DEBUG nova.compute.manager [req-e042f4a8-02a5-42f0-b886-bc1bf1fcc056 req-ddb56487-fe05-4ce2-a221-2d0ca0db5371 service nova] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Received event network-changed-40143bd4-2a73-46ca-bed1-d909b7cf967a {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 803.744994] env[62923]: DEBUG nova.compute.manager [req-e042f4a8-02a5-42f0-b886-bc1bf1fcc056 req-ddb56487-fe05-4ce2-a221-2d0ca0db5371 service nova] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Refreshing instance network info cache due to event network-changed-40143bd4-2a73-46ca-bed1-d909b7cf967a. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 803.745210] env[62923]: DEBUG oslo_concurrency.lockutils [req-e042f4a8-02a5-42f0-b886-bc1bf1fcc056 req-ddb56487-fe05-4ce2-a221-2d0ca0db5371 service nova] Acquiring lock "refresh_cache-c2e48555-68b5-4ed0-8ad6-a87833538df8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.745343] env[62923]: DEBUG oslo_concurrency.lockutils [req-e042f4a8-02a5-42f0-b886-bc1bf1fcc056 req-ddb56487-fe05-4ce2-a221-2d0ca0db5371 service nova] Acquired lock "refresh_cache-c2e48555-68b5-4ed0-8ad6-a87833538df8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.745490] env[62923]: DEBUG nova.network.neutron [req-e042f4a8-02a5-42f0-b886-bc1bf1fcc056 req-ddb56487-fe05-4ce2-a221-2d0ca0db5371 service nova] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Refreshing network info cache for port 40143bd4-2a73-46ca-bed1-d909b7cf967a {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 803.780373] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f696496-ccb8-4283-b735-4e3273b4d61b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.788284] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de19807a-afe2-4a2e-b852-1f2e2bd38314 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.799024] env[62923]: DEBUG nova.network.neutron [-] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.826101] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70db53df-bcaf-49fa-a470-3447291a7726 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.834632] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ab996d-9145-49d9-84a3-08c2c57c6b9f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.849257] env[62923]: DEBUG nova.compute.provider_tree [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.152928] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369886, 'name': Rename_Task, 'duration_secs': 0.126696} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.153248] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 804.153506] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e304a18-3a8e-4434-bf88-68071b35dd86 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.160220] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 804.160220] env[62923]: value = "task-1369887" [ 804.160220] env[62923]: _type = "Task" [ 804.160220] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.168032] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369887, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.325756] env[62923]: INFO nova.compute.manager [-] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Took 1.06 seconds to deallocate network for instance. 
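[editor's note] The recurring Acquiring/acquired/released lines with "waited N s" / "held N s" timings come from oslo_concurrency.lockutils. The "compute_resources" lock serializes the resource tracker's instance_claim and abort_instance_claim, which is why one request above waited 15.772s for it while another held it. A minimal sketch of the same named-lock pattern (assumes oslo.concurrency is installed; function bodies are illustrative):

    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def instance_claim(instance):
        # Runs with the named lock held; concurrent callers queue up,
        # producing the "waited N s" / "held N s" lines seen above.
        print("claiming resources for", instance)

    @lockutils.synchronized("compute_resources")
    def abort_instance_claim(instance):
        print("aborting claim for", instance)

    instance_claim("08d39755-f94c-45aa-bfb5-f179e8a370db")
    abort_instance_claim("c2e48555-68b5-4ed0-8ad6-a87833538df8")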
[ 804.328572] env[62923]: DEBUG nova.compute.claims [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 804.328572] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.342609] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.342856] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.351424] env[62923]: DEBUG nova.scheduler.client.report [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 804.388155] env[62923]: DEBUG nova.network.neutron [req-e042f4a8-02a5-42f0-b886-bc1bf1fcc056 req-ddb56487-fe05-4ce2-a221-2d0ca0db5371 service nova] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 804.431018] env[62923]: DEBUG nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Start spawning the instance on the hypervisor.
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 804.456265] env[62923]: DEBUG nova.virt.hardware [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 804.456542] env[62923]: DEBUG nova.virt.hardware [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 804.456724] env[62923]: DEBUG nova.virt.hardware [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 804.456936] env[62923]: DEBUG nova.virt.hardware [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 804.457319] env[62923]: DEBUG nova.virt.hardware [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 804.457498] env[62923]: DEBUG nova.virt.hardware [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 804.457702] env[62923]: DEBUG nova.virt.hardware [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 804.457861] env[62923]: DEBUG nova.virt.hardware [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 804.458036] env[62923]: DEBUG nova.virt.hardware [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 804.458200] env[62923]: DEBUG nova.virt.hardware [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 804.458368] env[62923]: DEBUG nova.virt.hardware [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 804.459252] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc15b84-7025-416f-9123-1139a47c1135 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.467418] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29aac499-a5ae-44f3-aa49-0a31e341224e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.549086] env[62923]: DEBUG nova.network.neutron [req-e042f4a8-02a5-42f0-b886-bc1bf1fcc056 req-ddb56487-fe05-4ce2-a221-2d0ca0db5371 service nova] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.669772] env[62923]: DEBUG oslo_vmware.api [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369887, 'name': PowerOnVM_Task, 'duration_secs': 0.403619} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.670663] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 804.670663] env[62923]: DEBUG nova.compute.manager [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 804.671039] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80dd30bb-fec7-4921-9119-bedafe3370c2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.858018] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.858018] env[62923]: DEBUG nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 804.860766] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.734s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.937989] env[62923]: ERROR nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 85a59bf2-c2a3-4e88-aa11-8f784b39fada, please check neutron logs for more information. 
[ 804.937989] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 804.937989] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.937989] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 804.937989] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.937989] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 804.937989] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.937989] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 804.937989] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.937989] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 804.937989] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.937989] env[62923]: ERROR nova.compute.manager raise self.value [ 804.937989] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.937989] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 804.937989] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.937989] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 804.938406] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.938406] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 804.938406] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 85a59bf2-c2a3-4e88-aa11-8f784b39fada, please check neutron logs for more information. 
[ 804.938406] env[62923]: ERROR nova.compute.manager [ 804.938406] env[62923]: Traceback (most recent call last): [ 804.938406] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 804.938406] env[62923]: listener.cb(fileno) [ 804.938406] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 804.938406] env[62923]: result = function(*args, **kwargs) [ 804.938406] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 804.938406] env[62923]: return func(*args, **kwargs) [ 804.938406] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 804.938406] env[62923]: raise e [ 804.938406] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.938406] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 804.938406] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.938406] env[62923]: created_port_ids = self._update_ports_for_instance( [ 804.938406] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.938406] env[62923]: with excutils.save_and_reraise_exception(): [ 804.938406] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.938406] env[62923]: self.force_reraise() [ 804.938406] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.938406] env[62923]: raise self.value [ 804.938406] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.938406] env[62923]: updated_port = self._update_port( [ 804.938406] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.938406] env[62923]: _ensure_no_port_binding_failure(port) [ 804.938406] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.938406] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 804.939211] env[62923]: nova.exception.PortBindingFailed: Binding failed for port 85a59bf2-c2a3-4e88-aa11-8f784b39fada, please check neutron logs for more information. [ 804.939211] env[62923]: Removing descriptor: 18 [ 804.939211] env[62923]: ERROR nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 85a59bf2-c2a3-4e88-aa11-8f784b39fada, please check neutron logs for more information. 
[ 804.939211] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Traceback (most recent call last): [ 804.939211] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 804.939211] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] yield resources [ 804.939211] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 804.939211] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] self.driver.spawn(context, instance, image_meta, [ 804.939211] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 804.939211] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 804.939211] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 804.939211] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] vm_ref = self.build_virtual_machine(instance, [ 804.939523] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 804.939523] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] vif_infos = vmwarevif.get_vif_info(self._session, [ 804.939523] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 804.939523] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] for vif in network_info: [ 804.939523] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 804.939523] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] return self._sync_wrapper(fn, *args, **kwargs) [ 804.939523] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 804.939523] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] self.wait() [ 804.939523] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 804.939523] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] self[:] = self._gt.wait() [ 804.939523] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 804.939523] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] return self._exit_event.wait() [ 804.939523] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 804.939891] env[62923]: ERROR 
nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] result = hub.switch() [ 804.939891] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 804.939891] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] return self.greenlet.switch() [ 804.939891] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 804.939891] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] result = function(*args, **kwargs) [ 804.939891] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 804.939891] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] return func(*args, **kwargs) [ 804.939891] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 804.939891] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] raise e [ 804.939891] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 804.939891] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] nwinfo = self.network_api.allocate_for_instance( [ 804.939891] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 804.939891] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] created_port_ids = self._update_ports_for_instance( [ 804.940352] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 804.940352] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] with excutils.save_and_reraise_exception(): [ 804.940352] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 804.940352] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] self.force_reraise() [ 804.940352] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 804.940352] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] raise self.value [ 804.940352] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 804.940352] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] updated_port = self._update_port( [ 804.940352] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 804.940352] 
env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] _ensure_no_port_binding_failure(port) [ 804.940352] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 804.940352] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] raise exception.PortBindingFailed(port_id=port['id']) [ 804.940655] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] nova.exception.PortBindingFailed: Binding failed for port 85a59bf2-c2a3-4e88-aa11-8f784b39fada, please check neutron logs for more information. [ 804.940655] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] [ 804.940655] env[62923]: INFO nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Terminating instance [ 804.941318] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Acquiring lock "refresh_cache-8a369d56-8f85-4d04-ac6b-bf2eced7098f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.941474] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Acquired lock "refresh_cache-8a369d56-8f85-4d04-ac6b-bf2eced7098f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.941634] env[62923]: DEBUG nova.network.neutron [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.053636] env[62923]: DEBUG oslo_concurrency.lockutils [req-e042f4a8-02a5-42f0-b886-bc1bf1fcc056 req-ddb56487-fe05-4ce2-a221-2d0ca0db5371 service nova] Releasing lock "refresh_cache-c2e48555-68b5-4ed0-8ad6-a87833538df8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.053636] env[62923]: DEBUG nova.compute.manager [req-e042f4a8-02a5-42f0-b886-bc1bf1fcc056 req-ddb56487-fe05-4ce2-a221-2d0ca0db5371 service nova] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Received event network-vif-deleted-40143bd4-2a73-46ca-bed1-d909b7cf967a {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 805.186465] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.366189] env[62923]: DEBUG nova.compute.utils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 
tempest-ServersTestFqdnHostnames-405385759-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 805.367574] env[62923]: DEBUG nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 805.367743] env[62923]: DEBUG nova.network.neutron [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 805.415868] env[62923]: DEBUG nova.policy [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fd9af4ed1164fbb819a20965df9caa7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9467d357348742ea88dc4c6d1d36d494', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 805.472727] env[62923]: DEBUG nova.network.neutron [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.597602] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e617b2fc-9849-4e1d-8fd3-59e1a176aaeb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.605392] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dafa8db9-b69c-4a09-b466-4a04b97351d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.634649] env[62923]: DEBUG nova.network.neutron [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.636354] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22b8871-dc1d-40a2-931f-a57060c44c7d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.644146] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230668c9-dd2d-4ef2-bdf4-ca434aa3cf39 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.663375] env[62923]: DEBUG nova.compute.provider_tree [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.774107] env[62923]: DEBUG nova.compute.manager [req-e520510c-cfcd-4afc-990f-59312e260331 req-133f9910-887f-4971-a7c2-d4726602d60d service nova] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Received event network-changed-85a59bf2-c2a3-4e88-aa11-8f784b39fada {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 805.774107] env[62923]: DEBUG nova.compute.manager [req-e520510c-cfcd-4afc-990f-59312e260331 req-133f9910-887f-4971-a7c2-d4726602d60d service nova] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Refreshing instance network info cache due to event network-changed-85a59bf2-c2a3-4e88-aa11-8f784b39fada. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 805.774107] env[62923]: DEBUG oslo_concurrency.lockutils [req-e520510c-cfcd-4afc-990f-59312e260331 req-133f9910-887f-4971-a7c2-d4726602d60d service nova] Acquiring lock "refresh_cache-8a369d56-8f85-4d04-ac6b-bf2eced7098f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.823920] env[62923]: DEBUG nova.network.neutron [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Successfully created port: bdd6a040-f201-4806-8fa8-86008708d23c {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 805.873163] env[62923]: DEBUG nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 806.140381] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Releasing lock "refresh_cache-8a369d56-8f85-4d04-ac6b-bf2eced7098f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.140806] env[62923]: DEBUG nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 806.141048] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 806.141383] env[62923]: DEBUG oslo_concurrency.lockutils [req-e520510c-cfcd-4afc-990f-59312e260331 req-133f9910-887f-4971-a7c2-d4726602d60d service nova] Acquired lock "refresh_cache-8a369d56-8f85-4d04-ac6b-bf2eced7098f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.141555] env[62923]: DEBUG nova.network.neutron [req-e520510c-cfcd-4afc-990f-59312e260331 req-133f9910-887f-4971-a7c2-d4726602d60d service nova] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Refreshing network info cache for port 85a59bf2-c2a3-4e88-aa11-8f784b39fada {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 806.142649] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9544d716-609a-49cd-ae1e-954cf08e7ae1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.151964] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43439f50-082d-46e7-b5e0-36b95904a6d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.163929] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquiring lock "7c98c50a-e7c7-4430-b5c6-dec88a78c397" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.164190] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Lock "7c98c50a-e7c7-4430-b5c6-dec88a78c397" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.164382] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquiring lock "7c98c50a-e7c7-4430-b5c6-dec88a78c397-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.164571] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Lock "7c98c50a-e7c7-4430-b5c6-dec88a78c397-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.164772] env[62923]: DEBUG oslo_concurrency.lockutils [None
req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Lock "7c98c50a-e7c7-4430-b5c6-dec88a78c397-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.167172] env[62923]: DEBUG nova.scheduler.client.report [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 806.170950] env[62923]: INFO nova.compute.manager [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Terminating instance [ 806.172544] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquiring lock "refresh_cache-7c98c50a-e7c7-4430-b5c6-dec88a78c397" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.172708] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquired lock "refresh_cache-7c98c50a-e7c7-4430-b5c6-dec88a78c397" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.172873] env[62923]: DEBUG nova.network.neutron [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 806.183179] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8a369d56-8f85-4d04-ac6b-bf2eced7098f could not be found.
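The Acquiring/acquired/"released" lines threaded through this section, with their waited and held timings, are emitted by the inner() wrapper in oslo_concurrency/lockutils.py that the log cites. A rough standard-library sketch of that bookkeeping (the print calls stand in for the DEBUG logger; the process-local lock registry and naming are simplified assumptions):

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}  # name -> threading.Lock, simplified process-local registry

    @contextmanager
    def timed_lock(name, caller):
        """Reproduce the waited/held accounting around a named lock."""
        lock = _locks.setdefault(name, threading.Lock())
        print('Acquiring lock "%s" by "%s"' % (name, caller))
        t0 = time.monotonic()
        lock.acquire()
        print('Lock "%s" acquired by "%s" :: waited %.3fs'
              % (name, caller, time.monotonic() - t0))
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print('Lock "%s" "released" by "%s" :: held %.3fs'
                  % (name, caller, time.monotonic() - t1))

For example, the compute_resources claim-abort above maps to: with timed_lock("compute_resources", "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim"): ... The long waited times in the log (e.g. 14.734s) show other request contexts queuing on that same named lock.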
[ 806.183382] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 806.183663] env[62923]: INFO nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 806.186201] env[62923]: DEBUG oslo.service.loopingcall [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 806.186201] env[62923]: DEBUG nova.compute.manager [-] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 806.186201] env[62923]: DEBUG nova.network.neutron [-] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 806.212078] env[62923]: DEBUG nova.network.neutron [-] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.671591] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.811s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.672275] env[62923]: ERROR nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5de6e97b-085e-43cc-9bee-0101cf0d68d8, please check neutron logs for more information.
[ 806.672275] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Traceback (most recent call last): [ 806.672275] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 806.672275] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] self.driver.spawn(context, instance, image_meta, [ 806.672275] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 806.672275] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] self._vmops.spawn(context, instance, image_meta, injected_files, [ 806.672275] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 806.672275] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] vm_ref = self.build_virtual_machine(instance, [ 806.672275] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 806.672275] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] vif_infos = vmwarevif.get_vif_info(self._session, [ 806.672275] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 806.672580] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] for vif in network_info: [ 806.672580] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 806.672580] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] return self._sync_wrapper(fn, *args, **kwargs) [ 806.672580] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 806.672580] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] self.wait() [ 806.672580] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 806.672580] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] self[:] = self._gt.wait() [ 806.672580] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 806.672580] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] return self._exit_event.wait() [ 806.672580] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 806.672580] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] result = hub.switch() [ 806.672580] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
806.672580] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] return self.greenlet.switch() [ 806.672998] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 806.672998] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] result = function(*args, **kwargs) [ 806.672998] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 806.672998] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] return func(*args, **kwargs) [ 806.672998] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 806.672998] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] raise e [ 806.672998] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 806.672998] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] nwinfo = self.network_api.allocate_for_instance( [ 806.672998] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 806.672998] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] created_port_ids = self._update_ports_for_instance( [ 806.672998] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 806.672998] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] with excutils.save_and_reraise_exception(): [ 806.672998] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.673348] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] self.force_reraise() [ 806.673348] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.673348] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] raise self.value [ 806.673348] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 806.673348] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] updated_port = self._update_port( [ 806.673348] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.673348] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] _ensure_no_port_binding_failure(port) [ 806.673348] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 806.673348] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] raise exception.PortBindingFailed(port_id=port['id']) [ 806.673348] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] nova.exception.PortBindingFailed: Binding failed for port 5de6e97b-085e-43cc-9bee-0101cf0d68d8, please check neutron logs for more information. [ 806.673348] env[62923]: ERROR nova.compute.manager [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] [ 806.673635] env[62923]: DEBUG nova.compute.utils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Binding failed for port 5de6e97b-085e-43cc-9bee-0101cf0d68d8, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 806.674989] env[62923]: DEBUG nova.network.neutron [req-e520510c-cfcd-4afc-990f-59312e260331 req-133f9910-887f-4971-a7c2-d4726602d60d service nova] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.676806] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.145s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.680958] env[62923]: DEBUG nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Build of instance f76d2304-7a4e-4f18-80de-ecb0b67bec28 was re-scheduled: Binding failed for port 5de6e97b-085e-43cc-9bee-0101cf0d68d8, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 806.681462] env[62923]: DEBUG nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 806.681738] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquiring lock "refresh_cache-f76d2304-7a4e-4f18-80de-ecb0b67bec28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.682009] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquired lock "refresh_cache-f76d2304-7a4e-4f18-80de-ecb0b67bec28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.682289] env[62923]: DEBUG nova.network.neutron [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 806.709992] env[62923]: DEBUG nova.network.neutron [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.714266] env[62923]: DEBUG nova.network.neutron [-] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.847627] env[62923]: DEBUG nova.network.neutron [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.874333] env[62923]: DEBUG nova.network.neutron [req-e520510c-cfcd-4afc-990f-59312e260331 req-133f9910-887f-4971-a7c2-d4726602d60d service nova] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.884107] env[62923]: DEBUG nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 806.909026] env[62923]: DEBUG nova.virt.hardware [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 806.909452] env[62923]: DEBUG nova.virt.hardware [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 806.909655] env[62923]: DEBUG nova.virt.hardware [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 806.909864] env[62923]: DEBUG nova.virt.hardware [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 806.910133] env[62923]: DEBUG nova.virt.hardware [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 806.910364] env[62923]: DEBUG nova.virt.hardware [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 806.910622] env[62923]: DEBUG nova.virt.hardware [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 806.910876] env[62923]: DEBUG nova.virt.hardware [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 806.911143] env[62923]: DEBUG 
nova.virt.hardware [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 806.911377] env[62923]: DEBUG nova.virt.hardware [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 806.911605] env[62923]: DEBUG nova.virt.hardware [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 806.912552] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1ad0a3-e37c-4781-95ca-67d7a2e2c883 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.923239] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f142d2-a489-49ce-b4bb-5ffa9f9d4f19 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.994934] env[62923]: ERROR nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bdd6a040-f201-4806-8fa8-86008708d23c, please check neutron logs for more information. 
[ 806.994934] env[62923]: ERROR nova.compute.manager Traceback (most recent call last): [ 806.994934] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 806.994934] env[62923]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 806.994934] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 806.994934] env[62923]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 806.994934] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 806.994934] env[62923]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 806.994934] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.994934] env[62923]: ERROR nova.compute.manager self.force_reraise() [ 806.994934] env[62923]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.994934] env[62923]: ERROR nova.compute.manager raise self.value [ 806.994934] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 806.994934] env[62923]: ERROR nova.compute.manager updated_port = self._update_port( [ 806.994934] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.994934] env[62923]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 806.995357] env[62923]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 806.995357] env[62923]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 806.995357] env[62923]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bdd6a040-f201-4806-8fa8-86008708d23c, please check neutron logs for more information. 
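Annotation: both this traceback and the eventlet one that follows funnel through the same oslo_utils.excutils.save_and_reraise_exception context manager: _update_port detects the failed binding, cleanup runs inside the with block, and force_reraise() re-raises the saved PortBindingFailed. A minimal sketch of that pattern, with a hypothetical port dict standing in for the Neutron response (the real checks live in nova/network/neutron.py):

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

    def _ensure_no_port_binding_failure(port):
        # Neutron flags a failed binding via the binding:vif_type field.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port['id'])

    def _update_port(port):
        try:
            _ensure_no_port_binding_failure(port)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; __exit__ then calls force_reraise(),
                # which re-raises the saved exception with its traceback,
                # producing stacked tracebacks like the ones in this log.
                print('rolling back port %s' % port['id'])

    try:
        _update_port({'id': 'bdd6a040-f201-4806-8fa8-86008708d23c',
                      'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print('binding failed for port', exc)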
[ 806.995357] env[62923]: ERROR nova.compute.manager [ 806.995357] env[62923]: Traceback (most recent call last): [ 806.995357] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 806.995357] env[62923]: listener.cb(fileno) [ 806.995357] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 806.995357] env[62923]: result = function(*args, **kwargs) [ 806.995357] env[62923]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 806.995357] env[62923]: return func(*args, **kwargs) [ 806.995357] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 806.995357] env[62923]: raise e [ 806.995357] env[62923]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 806.995357] env[62923]: nwinfo = self.network_api.allocate_for_instance( [ 806.995357] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 806.995357] env[62923]: created_port_ids = self._update_ports_for_instance( [ 806.995357] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 806.995357] env[62923]: with excutils.save_and_reraise_exception(): [ 806.995357] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.995357] env[62923]: self.force_reraise() [ 806.995357] env[62923]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.995357] env[62923]: raise self.value [ 806.995357] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 806.995357] env[62923]: updated_port = self._update_port( [ 806.995357] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.995357] env[62923]: _ensure_no_port_binding_failure(port) [ 806.995357] env[62923]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 806.995357] env[62923]: raise exception.PortBindingFailed(port_id=port['id']) [ 806.995967] env[62923]: nova.exception.PortBindingFailed: Binding failed for port bdd6a040-f201-4806-8fa8-86008708d23c, please check neutron logs for more information. [ 806.995967] env[62923]: Removing descriptor: 18 [ 806.995967] env[62923]: ERROR nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bdd6a040-f201-4806-8fa8-86008708d23c, please check neutron logs for more information. 
[ 806.995967] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Traceback (most recent call last): [ 806.995967] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 806.995967] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] yield resources [ 806.995967] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 806.995967] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] self.driver.spawn(context, instance, image_meta, [ 806.995967] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 806.995967] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] self._vmops.spawn(context, instance, image_meta, injected_files, [ 806.995967] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 806.995967] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] vm_ref = self.build_virtual_machine(instance, [ 806.996247] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 806.996247] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] vif_infos = vmwarevif.get_vif_info(self._session, [ 806.996247] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 806.996247] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] for vif in network_info: [ 806.996247] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 806.996247] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] return self._sync_wrapper(fn, *args, **kwargs) [ 806.996247] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 806.996247] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] self.wait() [ 806.996247] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 806.996247] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] self[:] = self._gt.wait() [ 806.996247] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 806.996247] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] return self._exit_event.wait() [ 806.996247] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 806.996513] env[62923]: ERROR 
nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] result = hub.switch() [ 806.996513] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 806.996513] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] return self.greenlet.switch() [ 806.996513] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 806.996513] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] result = function(*args, **kwargs) [ 806.996513] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 806.996513] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] return func(*args, **kwargs) [ 806.996513] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 806.996513] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] raise e [ 806.996513] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 806.996513] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] nwinfo = self.network_api.allocate_for_instance( [ 806.996513] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 806.996513] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] created_port_ids = self._update_ports_for_instance( [ 806.996783] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 806.996783] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] with excutils.save_and_reraise_exception(): [ 806.996783] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.996783] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] self.force_reraise() [ 806.996783] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.996783] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] raise self.value [ 806.996783] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 806.996783] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] updated_port = self._update_port( [ 806.996783] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.996783] 
env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] _ensure_no_port_binding_failure(port) [ 806.996783] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 806.996783] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] raise exception.PortBindingFailed(port_id=port['id']) [ 806.997070] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] nova.exception.PortBindingFailed: Binding failed for port bdd6a040-f201-4806-8fa8-86008708d23c, please check neutron logs for more information. [ 806.997070] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] [ 806.997070] env[62923]: INFO nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Terminating instance [ 806.998495] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Acquiring lock "refresh_cache-08d39755-f94c-45aa-bfb5-f179e8a370db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.998658] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Acquired lock "refresh_cache-08d39755-f94c-45aa-bfb5-f179e8a370db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.998929] env[62923]: DEBUG nova.network.neutron [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.203078] env[62923]: DEBUG nova.network.neutron [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.217298] env[62923]: INFO nova.compute.manager [-] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Took 1.03 seconds to deallocate network for instance. 
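Annotation: the Acquiring/Acquired/Releasing triplets threaded through the entries above come from oslo_concurrency.lockutils, which Nova uses both as a decorator (the "inner" frames at lockutils.py:402/407/421) and as a context manager (the "lock" frames at lockutils.py:310/313/331). A minimal sketch of the two spellings, with lock names copied from the log and the bodies elided:

    from oslo_concurrency import lockutils

    # Decorator form: the inner wrapper emits the
    # "Acquiring lock ... by ..." / acquired / released lines.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        pass  # body elided

    # Context-manager form, as used around the network info cache:
    with lockutils.lock('refresh_cache-08d39755-f94c-45aa-bfb5-f179e8a370db'):
        pass  # refresh the cache while the lock is held

    abort_instance_claim()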
[ 807.219800] env[62923]: DEBUG nova.compute.claims [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 807.219971] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.311006] env[62923]: DEBUG nova.network.neutron [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.350822] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Releasing lock "refresh_cache-7c98c50a-e7c7-4430-b5c6-dec88a78c397" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.351266] env[62923]: DEBUG nova.compute.manager [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 807.351459] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 807.352365] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bff6e09-3f27-4748-b3f6-758c05b85719 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.360407] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 807.360649] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35845589-551f-4ce1-bd5a-eb49937f90b5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.366591] env[62923]: DEBUG oslo_vmware.api [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 807.366591] env[62923]: value = "task-1369888" [ 807.366591] env[62923]: _type = "Task" [ 807.366591] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.374216] env[62923]: DEBUG oslo_vmware.api [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369888, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.376727] env[62923]: DEBUG oslo_concurrency.lockutils [req-e520510c-cfcd-4afc-990f-59312e260331 req-133f9910-887f-4971-a7c2-d4726602d60d service nova] Releasing lock "refresh_cache-8a369d56-8f85-4d04-ac6b-bf2eced7098f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.376968] env[62923]: DEBUG nova.compute.manager [req-e520510c-cfcd-4afc-990f-59312e260331 req-133f9910-887f-4971-a7c2-d4726602d60d service nova] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Received event network-vif-deleted-85a59bf2-c2a3-4e88-aa11-8f784b39fada {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 807.517207] env[62923]: DEBUG nova.network.neutron [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.609042] env[62923]: DEBUG nova.network.neutron [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.799588] env[62923]: DEBUG nova.compute.manager [req-02b01fab-b111-43dd-9d51-ef1999ff52df req-f1c29586-de95-4e3e-afb8-5b5c6d096875 service nova] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Received event network-changed-bdd6a040-f201-4806-8fa8-86008708d23c {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 807.799787] env[62923]: DEBUG nova.compute.manager [req-02b01fab-b111-43dd-9d51-ef1999ff52df req-f1c29586-de95-4e3e-afb8-5b5c6d096875 service nova] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Refreshing instance network info cache due to event network-changed-bdd6a040-f201-4806-8fa8-86008708d23c. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 807.799974] env[62923]: DEBUG oslo_concurrency.lockutils [req-02b01fab-b111-43dd-9d51-ef1999ff52df req-f1c29586-de95-4e3e-afb8-5b5c6d096875 service nova] Acquiring lock "refresh_cache-08d39755-f94c-45aa-bfb5-f179e8a370db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.813673] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Releasing lock "refresh_cache-f76d2304-7a4e-4f18-80de-ecb0b67bec28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.813876] env[62923]: DEBUG nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 807.814073] env[62923]: DEBUG nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 807.814238] env[62923]: DEBUG nova.network.neutron [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 807.834152] env[62923]: DEBUG nova.network.neutron [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.876619] env[62923]: DEBUG oslo_vmware.api [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369888, 'name': PowerOffVM_Task, 'duration_secs': 0.156328} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.876948] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 807.877024] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 807.877267] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d151426-f682-4195-88fd-d7d169de7d3f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.902857] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 807.903037] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 807.903600] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Deleting the datastore file [datastore2] 7c98c50a-e7c7-4430-b5c6-dec88a78c397 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 807.903600] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b250a70d-d4c8-4f6a-b345-d61162b4b2b8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.909375] env[62923]: DEBUG oslo_vmware.api [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for the task: (returnval){ [ 807.909375] env[62923]: value = "task-1369890" [ 807.909375] env[62923]: _type = "Task" [ 807.909375] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.916424] env[62923]: DEBUG oslo_vmware.api [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369890, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.111587] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Releasing lock "refresh_cache-08d39755-f94c-45aa-bfb5-f179e8a370db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.112039] env[62923]: DEBUG nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 808.112236] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 808.112608] env[62923]: DEBUG oslo_concurrency.lockutils [req-02b01fab-b111-43dd-9d51-ef1999ff52df req-f1c29586-de95-4e3e-afb8-5b5c6d096875 service nova] Acquired lock "refresh_cache-08d39755-f94c-45aa-bfb5-f179e8a370db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.112811] env[62923]: DEBUG nova.network.neutron [req-02b01fab-b111-43dd-9d51-ef1999ff52df req-f1c29586-de95-4e3e-afb8-5b5c6d096875 service nova] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Refreshing network info cache for port bdd6a040-f201-4806-8fa8-86008708d23c {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.114197] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16caee47-9254-479b-ae55-086dedb8088f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.123706] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ca51d4-4b8e-4058-86ae-b3fa32f32b88 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.146194] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 08d39755-f94c-45aa-bfb5-f179e8a370db could not be found. [ 808.146419] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 808.146593] env[62923]: INFO nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Took 0.03 seconds to destroy the instance on the hypervisor. 
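Annotation: the PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task sequence above follows oslo.vmware's invoke-then-poll idiom: invoke_api returns a task managed-object reference, and wait_for_task polls it (the "progress is 0%" lines) until it completes or fails. A hedged sketch of that shape, under assumed connection details (host, credentials, and vm_ref are placeholders, not values from this deployment):

    from oslo_vmware import api

    # Hypothetical connection details; only the invoke-then-poll
    # structure matters here.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)

    vm_ref = ...  # assumed: a VM managed-object reference, normally
                  # obtained via a PropertyCollector/SearchIndex lookup

    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)  # polls progress and raises on task error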
[ 808.146829] env[62923]: DEBUG oslo.service.loopingcall [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.147054] env[62923]: DEBUG nova.compute.manager [-] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 808.147145] env[62923]: DEBUG nova.network.neutron [-] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.160945] env[62923]: DEBUG nova.network.neutron [-] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.215051] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance f76d2304-7a4e-4f18-80de-ecb0b67bec28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.215051] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 7c98c50a-e7c7-4430-b5c6-dec88a78c397 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.215051] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance db26908c-6aa6-47b8-a3c4-461247e36d85 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.215257] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 83ead303-c5b9-4600-935b-fa1a77689dcf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.215295] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance c2e48555-68b5-4ed0-8ad6-a87833538df8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.215422] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 8a369d56-8f85-4d04-ac6b-bf2eced7098f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.215567] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 08d39755-f94c-45aa-bfb5-f179e8a370db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.337436] env[62923]: DEBUG nova.network.neutron [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.419151] env[62923]: DEBUG oslo_vmware.api [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Task: {'id': task-1369890, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294749} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.419525] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 808.419686] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 808.419825] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 808.420009] env[62923]: INFO nova.compute.manager [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Took 1.07 seconds to destroy the instance on the hypervisor. [ 808.420258] env[62923]: DEBUG oslo.service.loopingcall [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.420446] env[62923]: DEBUG nova.compute.manager [-] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 808.420536] env[62923]: DEBUG nova.network.neutron [-] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.435657] env[62923]: DEBUG nova.network.neutron [-] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.638781] env[62923]: DEBUG nova.network.neutron [req-02b01fab-b111-43dd-9d51-ef1999ff52df req-f1c29586-de95-4e3e-afb8-5b5c6d096875 service nova] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.662881] env[62923]: DEBUG nova.network.neutron [-] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.717469] env[62923]: DEBUG nova.network.neutron [req-02b01fab-b111-43dd-9d51-ef1999ff52df req-f1c29586-de95-4e3e-afb8-5b5c6d096875 service nova] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.720042] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 98974fb7-049a-4c72-a352-bc0a50d2a879 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.840246] env[62923]: INFO nova.compute.manager [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: f76d2304-7a4e-4f18-80de-ecb0b67bec28] Took 1.03 seconds to deallocate network for instance. [ 808.937679] env[62923]: DEBUG nova.network.neutron [-] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.166030] env[62923]: INFO nova.compute.manager [-] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Took 1.02 seconds to deallocate network for instance. 
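Annotation: the resource_tracker messages threaded through this stretch are one periodic _update_available_resource pass: instances that are scheduled but not yet started are skipped for allocation healing, while the six actively managed ones are counted into the host totals. Those six m1.nano instances (192 MB RAM, 1 GB disk, 1 vCPU each) plus the 512 MB host memory reservation reproduce the "Final resource view" figures that appear a little further down:

    # Re-deriving the tracker's "Final resource view" from the log:
    instances = 6                       # actively managed per the log
    flavor = {'MEMORY_MB': 192, 'DISK_GB': 1, 'VCPU': 1}   # m1.nano
    reserved_ram_mb = 512               # host reserved memory

    used_ram = reserved_ram_mb + instances * flavor['MEMORY_MB']
    used_disk = instances * flavor['DISK_GB']
    used_vcpus = instances * flavor['VCPU']
    print(used_ram, used_disk, used_vcpus)   # 1664 6 6, matching the log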
[ 809.169313] env[62923]: DEBUG nova.compute.claims [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Aborting claim: {{(pid=62923) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 809.169313] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.221542] env[62923]: DEBUG oslo_concurrency.lockutils [req-02b01fab-b111-43dd-9d51-ef1999ff52df req-f1c29586-de95-4e3e-afb8-5b5c6d096875 service nova] Releasing lock "refresh_cache-08d39755-f94c-45aa-bfb5-f179e8a370db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.221844] env[62923]: DEBUG nova.compute.manager [req-02b01fab-b111-43dd-9d51-ef1999ff52df req-f1c29586-de95-4e3e-afb8-5b5c6d096875 service nova] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Received event network-vif-deleted-bdd6a040-f201-4806-8fa8-86008708d23c {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 809.222671] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 81cca322-c1a0-4fbd-8013-0e4a4694ecfd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 809.440606] env[62923]: INFO nova.compute.manager [-] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Took 1.02 seconds to deallocate network for instance. [ 809.725739] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 6fa4d8a8-093f-4ae8-9148-f15f5bf98944 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 809.867792] env[62923]: INFO nova.scheduler.client.report [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Deleted allocations for instance f76d2304-7a4e-4f18-80de-ecb0b67bec28 [ 809.947524] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.229012] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 810.375827] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b4bd0262-d137-4f3a-b70f-7705c679331c tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "f76d2304-7a4e-4f18-80de-ecb0b67bec28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 160.336s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.732445] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 6cf594e3-e4a6-45f5-b8d2-06db1c200042 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 810.879652] env[62923]: DEBUG nova.compute.manager [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 811.235916] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance a616c7f0-8c39-4c08-a1a4-1d89e158d3c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 811.397559] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.457097] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquiring lock "d8bed052-7d83-471f-a18f-67c4c16a1b4a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.457393] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "d8bed052-7d83-471f-a18f-67c4c16a1b4a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.739070] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 7c3edceb-cc58-4925-a97a-3204936c836d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 812.242085] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 880cce70-5a0c-40a6-91b5-73d074feab6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 812.745659] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 92c59517-7e6f-45bd-8211-789a718d66d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 813.249330] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 94d2670f-d858-437a-a166-d148a57e07ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 813.753867] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 67a83e64-c8bd-499c-895a-11976d69195b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.257055] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 43065826-0f2b-48dc-bc42-8e0fd84fdcd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.257055] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 814.257055] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 814.446492] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03df2d53-df4c-4a81-8787-3784be0058df {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.454037] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bf5c9d-8298-4b1f-97f6-aab6a48a5cea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.484024] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361f8ceb-6551-4d9c-9482-a83ca03bb06b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.490805] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184e9e8c-975a-4959-9428-d7d48a22bd6e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.503630] env[62923]: DEBUG nova.compute.provider_tree [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.006936] env[62923]: DEBUG nova.scheduler.client.report [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 815.511500] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 815.511761] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.835s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.512070] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.463s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.513604] env[62923]: INFO nova.compute.claims [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.717463] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7733d318-720f-4ea0-8d1a-09d960091365 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.724686] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28c00c4-cd80-43a7-818a-37dd0b3ff19c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.755528] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7acbb20-5ac3-4b1f-bec8-6704e8a52529 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.762931] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76804a90-7bdb-46d5-967d-aff216d0db8d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.775700] env[62923]: DEBUG nova.compute.provider_tree [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.279974] env[62923]: DEBUG nova.scheduler.client.report [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 817.491093] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 817.788100] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.276s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.788682] env[62923]: DEBUG nova.compute.manager [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 817.792225] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.908s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.793395] env[62923]: INFO nova.compute.claims [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 818.298060] env[62923]: DEBUG nova.compute.utils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 818.302472] env[62923]: DEBUG nova.compute.manager [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 818.302472] env[62923]: DEBUG nova.network.neutron [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 818.379418] env[62923]: DEBUG nova.policy [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10e6ab2f8c50450d9ce9c06079726912', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb4fde1412c240b288e7337a06fae728', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 818.669352] env[62923]: DEBUG nova.network.neutron [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Successfully created port: 6785022c-2ac9-4a61-ad21-298adb7ba096 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 818.803014] env[62923]: DEBUG nova.compute.manager [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 818.991607] env[62923]: DEBUG nova.network.neutron [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Successfully created port: eb88e129-2421-443f-81c0-c94079f5d2a9 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.037875] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef0a29d-2721-46f0-9842-cdc168dc10ed {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.045720] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0a2f50-6e34-4640-90ad-f7d8603dacf0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.076636] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820ae1e3-6af4-42f4-95c6-33d005380bf3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.084466] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf4cd14-6eeb-4006-8338-5500ab5f25e5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.100159] env[62923]: DEBUG nova.compute.provider_tree [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.603883] env[62923]: DEBUG nova.scheduler.client.report [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 819.814190] env[62923]: DEBUG nova.compute.manager [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 819.838072] env[62923]: DEBUG nova.virt.hardware [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 819.838344] env[62923]: DEBUG nova.virt.hardware [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 819.838478] env[62923]: DEBUG nova.virt.hardware [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 819.839216] env[62923]: DEBUG nova.virt.hardware [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 819.839216] env[62923]: DEBUG nova.virt.hardware [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 819.839216] env[62923]: DEBUG nova.virt.hardware [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 819.839216] env[62923]: DEBUG nova.virt.hardware [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 819.839449] env[62923]: DEBUG nova.virt.hardware [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 819.839449] env[62923]: DEBUG nova.virt.hardware [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 
tempest-ServersTestMultiNic-5523554-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 819.839552] env[62923]: DEBUG nova.virt.hardware [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 819.839715] env[62923]: DEBUG nova.virt.hardware [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 819.840579] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d5ac86-9eb9-4636-af45-d694bfbd016c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.849311] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70199421-6ac2-4544-a26f-1b5e3364c3b4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.109367] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.318s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.109899] env[62923]: DEBUG nova.compute.manager [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 820.112514] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.447s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.556284] env[62923]: DEBUG nova.compute.manager [req-2e096bff-bf88-47cb-8d6f-ed9289965b02 req-7311dde5-111b-4877-a01f-28e5b28434d2 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Received event network-vif-plugged-6785022c-2ac9-4a61-ad21-298adb7ba096 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 820.556506] env[62923]: DEBUG oslo_concurrency.lockutils [req-2e096bff-bf88-47cb-8d6f-ed9289965b02 req-7311dde5-111b-4877-a01f-28e5b28434d2 service nova] Acquiring lock "98974fb7-049a-4c72-a352-bc0a50d2a879-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.556712] env[62923]: DEBUG oslo_concurrency.lockutils [req-2e096bff-bf88-47cb-8d6f-ed9289965b02 req-7311dde5-111b-4877-a01f-28e5b28434d2 service nova] Lock "98974fb7-049a-4c72-a352-bc0a50d2a879-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.556881] env[62923]: DEBUG oslo_concurrency.lockutils [req-2e096bff-bf88-47cb-8d6f-ed9289965b02 req-7311dde5-111b-4877-a01f-28e5b28434d2 service nova] Lock "98974fb7-049a-4c72-a352-bc0a50d2a879-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.557061] env[62923]: DEBUG nova.compute.manager [req-2e096bff-bf88-47cb-8d6f-ed9289965b02 req-7311dde5-111b-4877-a01f-28e5b28434d2 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] No waiting events found dispatching network-vif-plugged-6785022c-2ac9-4a61-ad21-298adb7ba096 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 820.557267] env[62923]: WARNING nova.compute.manager [req-2e096bff-bf88-47cb-8d6f-ed9289965b02 req-7311dde5-111b-4877-a01f-28e5b28434d2 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Received unexpected event network-vif-plugged-6785022c-2ac9-4a61-ad21-298adb7ba096 for instance with vm_state building and task_state spawning. [ 820.616675] env[62923]: DEBUG nova.compute.utils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 820.618157] env[62923]: DEBUG nova.compute.manager [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 820.618406] env[62923]: DEBUG nova.network.neutron [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 820.660221] env[62923]: DEBUG nova.network.neutron [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Successfully updated port: 6785022c-2ac9-4a61-ad21-298adb7ba096 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 820.684483] env[62923]: DEBUG nova.policy [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bc7339e5cc845668864bcdd8d09e610', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd013513ad708456f9a827c8d4974beec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 820.862823] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816ea21d-4c80-4315-82f9-f965059c733d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.870326] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c81456db-5d6c-4adf-810c-d8d980162ec5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.899732] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c109d33-1475-4bf2-ac34-341132b7664c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.907909] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27a4d05-19a2-49c9-9002-16e7979d78b6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.922099] env[62923]: DEBUG nova.compute.provider_tree [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.125701] env[62923]: DEBUG nova.compute.manager [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 821.162818] env[62923]: DEBUG nova.network.neutron [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Successfully created port: ba4b7bed-fcd3-414c-849f-c9687d3dd490 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 821.423973] env[62923]: DEBUG nova.scheduler.client.report [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 821.929537] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.817s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.930218] env[62923]: ERROR nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 281294a6-93ca-4a3d-b526-62a7775e01df, please check neutron logs for more information. 
[ 821.930218] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Traceback (most recent call last): [ 821.930218] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 821.930218] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] self.driver.spawn(context, instance, image_meta, [ 821.930218] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 821.930218] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] self._vmops.spawn(context, instance, image_meta, injected_files, [ 821.930218] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 821.930218] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] vm_ref = self.build_virtual_machine(instance, [ 821.930218] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 821.930218] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] vif_infos = vmwarevif.get_vif_info(self._session, [ 821.930218] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 821.930503] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] for vif in network_info: [ 821.930503] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 821.930503] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] return self._sync_wrapper(fn, *args, **kwargs) [ 821.930503] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 821.930503] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] self.wait() [ 821.930503] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 821.930503] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] self[:] = self._gt.wait() [ 821.930503] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 821.930503] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] return self._exit_event.wait() [ 821.930503] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 821.930503] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] result = hub.switch() [ 821.930503] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
821.930503] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] return self.greenlet.switch() [ 821.930781] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 821.930781] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] result = function(*args, **kwargs) [ 821.930781] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 821.930781] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] return func(*args, **kwargs) [ 821.930781] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 821.930781] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] raise e [ 821.930781] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 821.930781] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] nwinfo = self.network_api.allocate_for_instance( [ 821.930781] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 821.930781] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] created_port_ids = self._update_ports_for_instance( [ 821.930781] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 821.930781] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] with excutils.save_and_reraise_exception(): [ 821.930781] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.931069] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] self.force_reraise() [ 821.931069] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.931069] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] raise self.value [ 821.931069] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 821.931069] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] updated_port = self._update_port( [ 821.931069] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 821.931069] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] _ensure_no_port_binding_failure(port) [ 821.931069] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 821.931069] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] raise exception.PortBindingFailed(port_id=port['id']) [ 821.931069] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] nova.exception.PortBindingFailed: Binding failed for port 281294a6-93ca-4a3d-b526-62a7775e01df, please check neutron logs for more information. [ 821.931069] env[62923]: ERROR nova.compute.manager [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] [ 821.931305] env[62923]: DEBUG nova.compute.utils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Binding failed for port 281294a6-93ca-4a3d-b526-62a7775e01df, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 821.932245] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.257s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.933688] env[62923]: INFO nova.compute.claims [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 821.936648] env[62923]: DEBUG nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Build of instance db26908c-6aa6-47b8-a3c4-461247e36d85 was re-scheduled: Binding failed for port 281294a6-93ca-4a3d-b526-62a7775e01df, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 821.937097] env[62923]: DEBUG nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 821.937325] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Acquiring lock "refresh_cache-db26908c-6aa6-47b8-a3c4-461247e36d85" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.937468] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Acquired lock "refresh_cache-db26908c-6aa6-47b8-a3c4-461247e36d85" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.937617] env[62923]: DEBUG nova.network.neutron [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 822.139054] env[62923]: DEBUG nova.compute.manager [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 822.163322] env[62923]: DEBUG nova.virt.hardware [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 822.163610] env[62923]: DEBUG nova.virt.hardware [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 822.163766] env[62923]: DEBUG nova.virt.hardware [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 822.163939] env[62923]: DEBUG nova.virt.hardware [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 822.164092] env[62923]: DEBUG nova.virt.hardware [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 822.164238] env[62923]: DEBUG nova.virt.hardware [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 822.164436] env[62923]: DEBUG nova.virt.hardware [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 822.164585] env[62923]: DEBUG nova.virt.hardware [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 822.164771] env[62923]: DEBUG nova.virt.hardware [None 
req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 822.164944] env[62923]: DEBUG nova.virt.hardware [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 822.165124] env[62923]: DEBUG nova.virt.hardware [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 822.165981] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6e13fd-e702-481d-90a7-1ae59b029a0d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.173842] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96726122-a643-41fc-b8ac-a255e0b66216 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.462230] env[62923]: DEBUG nova.network.neutron [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.523537] env[62923]: DEBUG nova.network.neutron [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.580947] env[62923]: DEBUG nova.compute.manager [req-1d7d9cff-191a-4c67-8850-2fea9ed21e04 req-1c4bc991-a8dc-4893-be14-0dd1f1af21c8 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Received event network-changed-6785022c-2ac9-4a61-ad21-298adb7ba096 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.581191] env[62923]: DEBUG nova.compute.manager [req-1d7d9cff-191a-4c67-8850-2fea9ed21e04 req-1c4bc991-a8dc-4893-be14-0dd1f1af21c8 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Refreshing instance network info cache due to event network-changed-6785022c-2ac9-4a61-ad21-298adb7ba096. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 822.581379] env[62923]: DEBUG oslo_concurrency.lockutils [req-1d7d9cff-191a-4c67-8850-2fea9ed21e04 req-1c4bc991-a8dc-4893-be14-0dd1f1af21c8 service nova] Acquiring lock "refresh_cache-98974fb7-049a-4c72-a352-bc0a50d2a879" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.581523] env[62923]: DEBUG oslo_concurrency.lockutils [req-1d7d9cff-191a-4c67-8850-2fea9ed21e04 req-1c4bc991-a8dc-4893-be14-0dd1f1af21c8 service nova] Acquired lock "refresh_cache-98974fb7-049a-4c72-a352-bc0a50d2a879" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.581683] env[62923]: DEBUG nova.network.neutron [req-1d7d9cff-191a-4c67-8850-2fea9ed21e04 req-1c4bc991-a8dc-4893-be14-0dd1f1af21c8 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Refreshing network info cache for port 6785022c-2ac9-4a61-ad21-298adb7ba096 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.029205] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Releasing lock "refresh_cache-db26908c-6aa6-47b8-a3c4-461247e36d85" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.029205] env[62923]: DEBUG nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 823.029205] env[62923]: DEBUG nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 823.029205] env[62923]: DEBUG nova.network.neutron [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 823.061798] env[62923]: DEBUG nova.network.neutron [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.154584] env[62923]: DEBUG nova.network.neutron [req-1d7d9cff-191a-4c67-8850-2fea9ed21e04 req-1c4bc991-a8dc-4893-be14-0dd1f1af21c8 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.159576] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9786f5-73a2-4c49-9ff4-3bb7286db53b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.167335] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ae075a-9c04-4679-a2bc-ec4e6d171d37 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.200106] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2bc834-81d8-4804-9226-edcd1f71c700 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.207785] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f26dd10-1443-4983-829b-7d045595e993 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.222216] env[62923]: DEBUG nova.compute.provider_tree [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.357561] env[62923]: DEBUG nova.network.neutron [req-1d7d9cff-191a-4c67-8850-2fea9ed21e04 req-1c4bc991-a8dc-4893-be14-0dd1f1af21c8 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.385209] env[62923]: DEBUG nova.network.neutron [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Successfully updated port: eb88e129-2421-443f-81c0-c94079f5d2a9 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 823.396720] env[62923]: DEBUG nova.network.neutron [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Successfully updated port: ba4b7bed-fcd3-414c-849f-c9687d3dd490 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 823.564865] env[62923]: DEBUG nova.network.neutron [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.728773] env[62923]: DEBUG nova.scheduler.client.report [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 823.859616] env[62923]: DEBUG oslo_concurrency.lockutils [req-1d7d9cff-191a-4c67-8850-2fea9ed21e04 req-1c4bc991-a8dc-4893-be14-0dd1f1af21c8 service nova] Releasing lock "refresh_cache-98974fb7-049a-4c72-a352-bc0a50d2a879" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.888069] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquiring lock "refresh_cache-98974fb7-049a-4c72-a352-bc0a50d2a879" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.888069] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquired lock "refresh_cache-98974fb7-049a-4c72-a352-bc0a50d2a879" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.888069] env[62923]: DEBUG nova.network.neutron [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 823.900929] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.901134] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquired lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.901227] env[62923]: DEBUG nova.network.neutron [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 824.067767] env[62923]: INFO nova.compute.manager [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] [instance: db26908c-6aa6-47b8-a3c4-461247e36d85] Took 1.04 seconds to deallocate network for instance. 
[ 824.236733] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.304s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.236733] env[62923]: DEBUG nova.compute.manager [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 824.240071] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.161s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.241530] env[62923]: INFO nova.compute.claims [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.452095] env[62923]: DEBUG nova.network.neutron [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.486337] env[62923]: DEBUG nova.network.neutron [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.612721] env[62923]: DEBUG nova.compute.manager [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Received event network-vif-plugged-eb88e129-2421-443f-81c0-c94079f5d2a9 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 824.613038] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] Acquiring lock "98974fb7-049a-4c72-a352-bc0a50d2a879-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.613269] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] Lock "98974fb7-049a-4c72-a352-bc0a50d2a879-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.613445] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] Lock "98974fb7-049a-4c72-a352-bc0a50d2a879-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.613617] env[62923]: DEBUG nova.compute.manager [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] No waiting events found dispatching network-vif-plugged-eb88e129-2421-443f-81c0-c94079f5d2a9 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 824.613783] env[62923]: WARNING nova.compute.manager [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Received unexpected event network-vif-plugged-eb88e129-2421-443f-81c0-c94079f5d2a9 for instance with vm_state building and task_state spawning. 
[ 824.614053] env[62923]: DEBUG nova.compute.manager [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Received event network-vif-plugged-ba4b7bed-fcd3-414c-849f-c9687d3dd490 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 824.614287] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] Acquiring lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.614479] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] Lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.614638] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] Lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.614876] env[62923]: DEBUG nova.compute.manager [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] No waiting events found dispatching network-vif-plugged-ba4b7bed-fcd3-414c-849f-c9687d3dd490 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 824.615055] env[62923]: WARNING nova.compute.manager [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Received unexpected event network-vif-plugged-ba4b7bed-fcd3-414c-849f-c9687d3dd490 for instance with vm_state building and task_state spawning. [ 824.615215] env[62923]: DEBUG nova.compute.manager [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Received event network-changed-eb88e129-2421-443f-81c0-c94079f5d2a9 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 824.615361] env[62923]: DEBUG nova.compute.manager [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Refreshing instance network info cache due to event network-changed-eb88e129-2421-443f-81c0-c94079f5d2a9. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 824.615522] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] Acquiring lock "refresh_cache-98974fb7-049a-4c72-a352-bc0a50d2a879" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.748828] env[62923]: DEBUG nova.compute.utils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 824.751061] env[62923]: DEBUG nova.compute.manager [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 824.751242] env[62923]: DEBUG nova.network.neutron [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 824.991633] env[62923]: DEBUG nova.network.neutron [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Updating instance_info_cache with network_info: [{"id": "ba4b7bed-fcd3-414c-849f-c9687d3dd490", "address": "fa:16:3e:8a:0d:06", "network": {"id": "4f1191c4-7834-47c1-9c12-06d257cf913f", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-828952864-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d013513ad708456f9a827c8d4974beec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba4b7bed-fc", "ovs_interfaceid": "ba4b7bed-fcd3-414c-849f-c9687d3dd490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.028401] env[62923]: DEBUG nova.policy [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '307fd359c3b542c798ad8f84be4d018d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2f39b542fd245028bbb6c9f939434d9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 
'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 825.110676] env[62923]: INFO nova.scheduler.client.report [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Deleted allocations for instance db26908c-6aa6-47b8-a3c4-461247e36d85 [ 825.255191] env[62923]: DEBUG nova.compute.manager [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 825.421534] env[62923]: DEBUG nova.network.neutron [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Updating instance_info_cache with network_info: [{"id": "6785022c-2ac9-4a61-ad21-298adb7ba096", "address": "fa:16:3e:ef:cd:42", "network": {"id": "fe8eddfa-826c-489d-a422-801d812cb5a5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1912271493", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb4fde1412c240b288e7337a06fae728", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6785022c-2a", "ovs_interfaceid": "6785022c-2ac9-4a61-ad21-298adb7ba096", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "eb88e129-2421-443f-81c0-c94079f5d2a9", "address": "fa:16:3e:12:35:64", "network": {"id": "7f8e422e-98c2-4d7f-bd63-088b1021ad33", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1058654876", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "eb4fde1412c240b288e7337a06fae728", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb88e129-24", "ovs_interfaceid": "eb88e129-2421-443f-81c0-c94079f5d2a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 825.496744] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Releasing lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.497136] env[62923]: DEBUG nova.compute.manager [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Instance network_info: |[{"id": "ba4b7bed-fcd3-414c-849f-c9687d3dd490", "address": "fa:16:3e:8a:0d:06", "network": {"id": "4f1191c4-7834-47c1-9c12-06d257cf913f", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-828952864-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d013513ad708456f9a827c8d4974beec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba4b7bed-fc", "ovs_interfaceid": "ba4b7bed-fcd3-414c-849f-c9687d3dd490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 825.497627] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:0d:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba4b7bed-fcd3-414c-849f-c9687d3dd490', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 825.506017] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Creating folder: Project (d013513ad708456f9a827c8d4974beec). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 825.507714] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-539ebc80-b1b3-4e13-ab69-b579646f85a2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.520170] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Created folder: Project (d013513ad708456f9a827c8d4974beec) in parent group-v291405. 
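The two Folder.CreateFolder invocations above build the per-tenant inventory hierarchy: a "Project (<tenant-id>)" folder under Nova's root folder (group-v291405), then an "Instances" folder beneath it (group-v291422). Below is a minimal sketch of issuing that call with oslo.vmware; the endpoint, credentials, and parent references are placeholders, and the real driver also looks up pre-existing folders and handles DuplicateName races, so this is illustrative rather than the actual Nova code path:

    from oslo_vmware import api, exceptions as vexc

    # Placeholder endpoint and credentials; Nova reads these from nova.conf.
    session = api.VMwareAPISession('vc1.example.com', 'user', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)

    def ensure_folder(parent_ref, name):
        """Create a child folder under parent_ref, tolerating duplicates."""
        try:
            # CreateFolder returns the managed-object reference of the new
            # folder (the group-v2914xx values in the log above).
            return session.invoke_api(session.vim, 'CreateFolder',
                                      parent_ref, name=name)
        except vexc.DuplicateName:
            # A concurrent build already created the folder; the driver
            # then looks up the existing child instead of failing.
            return None

    # Mirrors the sequence logged above (parent refs are placeholders):
    # project = ensure_folder(root_ref, 'Project (d013513ad708456f9a827c8d4974beec)')
    # instances = ensure_folder(project, 'Instances')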
[ 825.520943] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Creating folder: Instances. Parent ref: group-v291422. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 825.520943] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70ef2bd5-8d09-4df1-b55b-32e5e205119f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.531811] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Created folder: Instances in parent group-v291422. [ 825.532192] env[62923]: DEBUG oslo.service.loopingcall [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 825.532477] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 825.532803] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8fee0e3b-2f0e-48f3-b7be-9a391a6ca8f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.557396] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 825.557396] env[62923]: value = "task-1369893" [ 825.557396] env[62923]: _type = "Task" [ 825.557396] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.572718] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369893, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.621389] env[62923]: DEBUG oslo_concurrency.lockutils [None req-91a37e8b-c883-45ee-bcef-3671ad3cf0ad tempest-TenantUsagesTestJSON-1389690237 tempest-TenantUsagesTestJSON-1389690237-project-member] Lock "db26908c-6aa6-47b8-a3c4-461247e36d85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.516s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.651659] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3423e3da-8034-4326-b3ba-41cba2c5db24 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.661882] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c56ecc-42d2-4b57-bdff-947617ff9bbc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.717575] env[62923]: DEBUG nova.network.neutron [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Successfully created port: 267bd057-e537-4d4f-a050-8b9b176c0786 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 825.721634] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff25245c-1a5c-446b-8b69-1cbf34b6ed27 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.729441] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e015891-c043-478c-98b4-9ea047ce3b3a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.742878] env[62923]: DEBUG nova.compute.provider_tree [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.925109] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Releasing lock "refresh_cache-98974fb7-049a-4c72-a352-bc0a50d2a879" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.925550] env[62923]: DEBUG nova.compute.manager [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Instance network_info: |[{"id": "6785022c-2ac9-4a61-ad21-298adb7ba096", "address": "fa:16:3e:ef:cd:42", "network": {"id": "fe8eddfa-826c-489d-a422-801d812cb5a5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1912271493", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb4fde1412c240b288e7337a06fae728", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6785022c-2a", "ovs_interfaceid": "6785022c-2ac9-4a61-ad21-298adb7ba096", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "eb88e129-2421-443f-81c0-c94079f5d2a9", "address": "fa:16:3e:12:35:64", "network": {"id": "7f8e422e-98c2-4d7f-bd63-088b1021ad33", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1058654876", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "eb4fde1412c240b288e7337a06fae728", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb88e129-24", "ovs_interfaceid": "eb88e129-2421-443f-81c0-c94079f5d2a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 825.925990] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] Acquired lock "refresh_cache-98974fb7-049a-4c72-a352-bc0a50d2a879" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.926144] env[62923]: DEBUG nova.network.neutron [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Refreshing network info cache for port eb88e129-2421-443f-81c0-c94079f5d2a9 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 825.927461] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:cd:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98d96b75-ac36-499a-adc2-130c8c1d55ca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6785022c-2ac9-4a61-ad21-298adb7ba096', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:35:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4b6ddb2-2e19-4031-9b22-add90d41a114', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb88e129-2421-443f-81c0-c94079f5d2a9', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 825.936660] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Creating folder: Project (eb4fde1412c240b288e7337a06fae728). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 825.937775] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-95de9dd1-0546-425e-a90d-886bd1e38a48 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.948696] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Created folder: Project (eb4fde1412c240b288e7337a06fae728) in parent group-v291405. [ 825.949012] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Creating folder: Instances. Parent ref: group-v291425. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 825.949165] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4aac1f1b-7087-43c8-a583-67ecc86af12a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.958405] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Created folder: Instances in parent group-v291425. [ 825.958638] env[62923]: DEBUG oslo.service.loopingcall [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 825.958823] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 825.959041] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86311cc0-9efa-4acc-a09c-45c1d9ed39cc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.980264] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 825.980264] env[62923]: value = "task-1369896" [ 825.980264] env[62923]: _type = "Task" [ 825.980264] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.988075] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369896, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.067771] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369893, 'name': CreateVM_Task, 'duration_secs': 0.316476} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.067952] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 826.075713] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.075852] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.076197] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 826.076457] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdb5d40f-8fdd-45fc-971c-0e017d39bcb1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.081527] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for the task: (returnval){ [ 826.081527] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]524a859a-5ab2-de43-c495-1da0ffb97ee0" [ 826.081527] env[62923]: _type = "Task" [ 826.081527] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.089502] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]524a859a-5ab2-de43-c495-1da0ffb97ee0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.125269] env[62923]: DEBUG nova.compute.manager [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 826.246977] env[62923]: DEBUG nova.scheduler.client.report [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 826.263647] env[62923]: DEBUG nova.compute.manager [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 826.291588] env[62923]: DEBUG nova.virt.hardware [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 826.291848] env[62923]: DEBUG nova.virt.hardware [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 826.292008] env[62923]: DEBUG nova.virt.hardware [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 826.292198] env[62923]: DEBUG nova.virt.hardware [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 826.292343] env[62923]: DEBUG nova.virt.hardware [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 826.292490] env[62923]: DEBUG nova.virt.hardware [None 
req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 826.292734] env[62923]: DEBUG nova.virt.hardware [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 826.292890] env[62923]: DEBUG nova.virt.hardware [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 826.293065] env[62923]: DEBUG nova.virt.hardware [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 826.293231] env[62923]: DEBUG nova.virt.hardware [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 826.293398] env[62923]: DEBUG nova.virt.hardware [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 826.294294] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55d75cf-a0f8-4aa9-ae1e-f9c123b9a5f0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.303314] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd90547-20db-4a61-bf52-0e3924429872 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.492724] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369896, 'name': CreateVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.592540] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]524a859a-5ab2-de43-c495-1da0ffb97ee0, 'name': SearchDatastore_Task, 'duration_secs': 0.018201} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.592889] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.593167] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 826.593456] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.593638] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.593847] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 826.594162] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92e64ace-ed32-445e-bac3-8655de180e90 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.602230] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 826.602473] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 826.603257] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fc7d61d-24f8-41ba-bb15-db8f97bc3581 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.610835] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for the task: (returnval){ [ 826.610835] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c05128-578f-87e1-a665-7ca18293709e" [ 826.610835] env[62923]: _type = "Task" [ 826.610835] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.621307] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c05128-578f-87e1-a665-7ca18293709e, 'name': SearchDatastore_Task, 'duration_secs': 0.00762} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.622077] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25c77b41-9da4-4356-bcfe-badbdf70aac9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.627119] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for the task: (returnval){ [ 826.627119] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d4a09d-2698-7d67-99f9-f284707f8f7a" [ 826.627119] env[62923]: _type = "Task" [ 826.627119] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.644954] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d4a09d-2698-7d67-99f9-f284707f8f7a, 'name': SearchDatastore_Task, 'duration_secs': 0.010071} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.644954] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.645177] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 81cca322-c1a0-4fbd-8013-0e4a4694ecfd/81cca322-c1a0-4fbd-8013-0e4a4694ecfd.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 826.645599] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e7994b3-f3d6-40f4-8ccc-870532ee4cd1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.649204] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.652977] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for the task: (returnval){ [ 826.652977] env[62923]: value = "task-1369897" [ 826.652977] env[62923]: _type = "Task" [ 826.652977] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.660761] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369897, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.752489] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.753422] env[62923]: DEBUG nova.compute.manager [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 826.757720] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.743s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.783334] env[62923]: DEBUG nova.network.neutron [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Updated VIF entry in instance network info cache for port eb88e129-2421-443f-81c0-c94079f5d2a9. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 826.783747] env[62923]: DEBUG nova.network.neutron [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Updating instance_info_cache with network_info: [{"id": "6785022c-2ac9-4a61-ad21-298adb7ba096", "address": "fa:16:3e:ef:cd:42", "network": {"id": "fe8eddfa-826c-489d-a422-801d812cb5a5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1912271493", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb4fde1412c240b288e7337a06fae728", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98d96b75-ac36-499a-adc2-130c8c1d55ca", "external-id": "nsx-vlan-transportzone-564", "segmentation_id": 564, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6785022c-2a", "ovs_interfaceid": "6785022c-2ac9-4a61-ad21-298adb7ba096", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "eb88e129-2421-443f-81c0-c94079f5d2a9", "address": "fa:16:3e:12:35:64", "network": {"id": "7f8e422e-98c2-4d7f-bd63-088b1021ad33", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1058654876", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "eb4fde1412c240b288e7337a06fae728", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb88e129-24", "ovs_interfaceid": "eb88e129-2421-443f-81c0-c94079f5d2a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 826.992860] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369896, 'name': CreateVM_Task, 'duration_secs': 0.568421} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.993052] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 826.993852] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.994029] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.994376] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 826.994720] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f81dd14-bc57-4f7f-af34-06a0fc1238c6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.001809] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Waiting for the task: (returnval){ [ 827.001809] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52061ef3-b260-1773-40c8-7057007436d5" [ 827.001809] env[62923]: _type = "Task" [ 827.001809] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.012031] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52061ef3-b260-1773-40c8-7057007436d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.163222] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369897, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473217} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.163490] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 81cca322-c1a0-4fbd-8013-0e4a4694ecfd/81cca322-c1a0-4fbd-8013-0e4a4694ecfd.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 827.163685] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 827.163933] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64c62333-f061-4502-84ca-c7dd84cb9304 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.170131] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for the task: (returnval){ [ 827.170131] env[62923]: value = "task-1369898" [ 827.170131] env[62923]: _type = "Task" [ 827.170131] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.178103] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369898, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.263190] env[62923]: DEBUG nova.compute.utils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 827.269287] env[62923]: DEBUG nova.compute.manager [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 827.269287] env[62923]: DEBUG nova.network.neutron [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 827.271877] env[62923]: DEBUG nova.compute.manager [req-e14a2c3d-b090-445d-a79b-c7f7ddf5d6f0 req-8f0d4c92-333a-4b02-8891-52e84160d8c1 service nova] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Received event network-vif-plugged-267bd057-e537-4d4f-a050-8b9b176c0786 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.272167] env[62923]: DEBUG oslo_concurrency.lockutils [req-e14a2c3d-b090-445d-a79b-c7f7ddf5d6f0 req-8f0d4c92-333a-4b02-8891-52e84160d8c1 service nova] Acquiring lock "6fa4d8a8-093f-4ae8-9148-f15f5bf98944-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.272335] env[62923]: DEBUG oslo_concurrency.lockutils [req-e14a2c3d-b090-445d-a79b-c7f7ddf5d6f0 req-8f0d4c92-333a-4b02-8891-52e84160d8c1 service nova] Lock "6fa4d8a8-093f-4ae8-9148-f15f5bf98944-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.272473] env[62923]: DEBUG oslo_concurrency.lockutils [req-e14a2c3d-b090-445d-a79b-c7f7ddf5d6f0 req-8f0d4c92-333a-4b02-8891-52e84160d8c1 service nova] Lock "6fa4d8a8-093f-4ae8-9148-f15f5bf98944-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.272637] env[62923]: DEBUG nova.compute.manager [req-e14a2c3d-b090-445d-a79b-c7f7ddf5d6f0 req-8f0d4c92-333a-4b02-8891-52e84160d8c1 service nova] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] No waiting events found dispatching network-vif-plugged-267bd057-e537-4d4f-a050-8b9b176c0786 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 827.272791] env[62923]: WARNING nova.compute.manager [req-e14a2c3d-b090-445d-a79b-c7f7ddf5d6f0 req-8f0d4c92-333a-4b02-8891-52e84160d8c1 service nova] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Received unexpected event network-vif-plugged-267bd057-e537-4d4f-a050-8b9b176c0786 for instance with vm_state building and task_state spawning. 
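The "No waiting events found" / "Received unexpected event" pair above comes from Nova's external-event handshake: the compute manager registers a waiter for network-vif-plugged-<port> before it plugs the VIF, and the Neutron-driven callback pops that waiter; when the callback arrives while the instance is still building and no waiter has been registered yet, the event is dispatched to nobody and logged as unexpected (harmless here). A stripped-down sketch of the pattern, with illustrative names rather than Nova's actual classes:

    import threading

    class InstanceEvents:
        """Waiters keyed by (instance_uuid, event_name)."""

        def __init__(self):
            self._waiters = {}
            self._lock = threading.Lock()

        def prepare(self, uuid, name):
            # Called *before* triggering the action that causes the event.
            ev = threading.Event()
            with self._lock:
                self._waiters[(uuid, name)] = ev
            return ev

        def pop(self, uuid, name):
            with self._lock:
                return self._waiters.pop((uuid, name), None)

    events = InstanceEvents()

    def external_instance_event(uuid, name):
        ev = events.pop(uuid, name)
        if ev is None:
            # No waiter registered yet: the WARNING path seen in the log.
            print(f'WARNING: unexpected event {name} for instance {uuid}')
        else:
            ev.set()  # wakes whoever is blocked in ev.wait(timeout)

    # Event arrives before prepare() was called -> warning, as above:
    external_instance_event('6fa4d8a8-...', 'network-vif-plugged-267bd057')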
[ 827.286768] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] Releasing lock "refresh_cache-98974fb7-049a-4c72-a352-bc0a50d2a879" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.286915] env[62923]: DEBUG nova.compute.manager [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Received event network-changed-ba4b7bed-fcd3-414c-849f-c9687d3dd490 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.287081] env[62923]: DEBUG nova.compute.manager [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Refreshing instance network info cache due to event network-changed-ba4b7bed-fcd3-414c-849f-c9687d3dd490. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 827.287290] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] Acquiring lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.287427] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] Acquired lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.287583] env[62923]: DEBUG nova.network.neutron [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Refreshing network info cache for port ba4b7bed-fcd3-414c-849f-c9687d3dd490 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 827.327467] env[62923]: DEBUG nova.policy [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '182e1b6f26ed401da24d07a85f993802', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '011a5ec25af44f92961be00f82c10c08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 827.512959] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52061ef3-b260-1773-40c8-7057007436d5, 'name': SearchDatastore_Task, 'duration_secs': 0.054606} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.515858] env[62923]: DEBUG nova.network.neutron [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Successfully updated port: 267bd057-e537-4d4f-a050-8b9b176c0786 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 827.517047] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.517279] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 827.517504] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.517647] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.517819] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 827.518450] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-667a4c67-5cf5-4ae4-9355-65bd52fc9b21 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.527353] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 827.527542] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 827.528260] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c336e41-bc90-4690-9809-21591697c6c2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.541945] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Waiting for the task: (returnval){ [ 827.541945] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5299e555-09c6-8da9-ef02-0f1f3a0418a2" [ 827.541945] env[62923]: _type = "Task" [ 827.541945] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.550542] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5299e555-09c6-8da9-ef02-0f1f3a0418a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.551647] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57565ce-a230-48a5-b17e-d8c50db68dc6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.557659] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c339b2df-0070-4585-bff7-ef430c44a22b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.588035] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb2e44c-94ba-4870-841c-13aceb3545f9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.595342] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5183c876-b27a-4913-a813-f2ca2b0c80c4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.608032] env[62923]: DEBUG nova.compute.provider_tree [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.679308] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369898, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101493} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.680036] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 827.680398] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac48e12-5309-4fc1-8f70-270b9f9465bc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.702678] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 81cca322-c1a0-4fbd-8013-0e4a4694ecfd/81cca322-c1a0-4fbd-8013-0e4a4694ecfd.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 827.702946] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37b014a8-849e-45e3-ba91-aa937e6c6eb9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.721831] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for the task: (returnval){ [ 827.721831] env[62923]: value = "task-1369899" [ 827.721831] env[62923]: _type = "Task" [ 827.721831] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.729094] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369899, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.768509] env[62923]: DEBUG nova.compute.manager [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 827.778611] env[62923]: DEBUG nova.network.neutron [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Successfully created port: 1fb7d101-34b0-45db-b473-84c94e4b9aaa {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 828.019984] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Acquiring lock "refresh_cache-6fa4d8a8-093f-4ae8-9148-f15f5bf98944" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.020177] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Acquired lock "refresh_cache-6fa4d8a8-093f-4ae8-9148-f15f5bf98944" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.020331] env[62923]: DEBUG nova.network.neutron [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.052277] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5299e555-09c6-8da9-ef02-0f1f3a0418a2, 'name': SearchDatastore_Task, 'duration_secs': 0.008871} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.053031] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0336076f-b25f-4744-a52b-5c812c0d5c74 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.058019] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Waiting for the task: (returnval){ [ 828.058019] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52027eb3-4299-0c75-97ff-7f157984b4a5" [ 828.058019] env[62923]: _type = "Task" [ 828.058019] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.068993] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52027eb3-4299-0c75-97ff-7f157984b4a5, 'name': SearchDatastore_Task, 'duration_secs': 0.008369} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.071044] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.071309] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 98974fb7-049a-4c72-a352-bc0a50d2a879/98974fb7-049a-4c72-a352-bc0a50d2a879.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 828.071547] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f306fe6-8286-4177-8be8-66ab6777c12b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.078984] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Waiting for the task: (returnval){ [ 828.078984] env[62923]: value = "task-1369900" [ 828.078984] env[62923]: _type = "Task" [ 828.078984] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.086806] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369900, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.112018] env[62923]: DEBUG nova.scheduler.client.report [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 828.235169] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369899, 'name': ReconfigVM_Task, 'duration_secs': 0.264415} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.235576] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 81cca322-c1a0-4fbd-8013-0e4a4694ecfd/81cca322-c1a0-4fbd-8013-0e4a4694ecfd.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 828.236139] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6282a256-b43b-4147-8610-08c22d1038c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.242740] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for the task: (returnval){ [ 828.242740] env[62923]: value = "task-1369901" [ 828.242740] env[62923]: _type = "Task" [ 828.242740] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.253410] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369901, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.329672] env[62923]: DEBUG nova.network.neutron [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Updated VIF entry in instance network info cache for port ba4b7bed-fcd3-414c-849f-c9687d3dd490. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 828.330113] env[62923]: DEBUG nova.network.neutron [req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Updating instance_info_cache with network_info: [{"id": "ba4b7bed-fcd3-414c-849f-c9687d3dd490", "address": "fa:16:3e:8a:0d:06", "network": {"id": "4f1191c4-7834-47c1-9c12-06d257cf913f", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-828952864-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d013513ad708456f9a827c8d4974beec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba4b7bed-fc", "ovs_interfaceid": "ba4b7bed-fcd3-414c-849f-c9687d3dd490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.565805] env[62923]: DEBUG nova.network.neutron [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.590453] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369900, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458362} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.590744] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 98974fb7-049a-4c72-a352-bc0a50d2a879/98974fb7-049a-4c72-a352-bc0a50d2a879.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 828.590971] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 828.591228] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a25ce6f4-a5c4-48c9-8c62-0e6acb19a0b5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.596548] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Waiting for the task: (returnval){ [ 828.596548] env[62923]: value = "task-1369902" [ 828.596548] env[62923]: _type = "Task" [ 828.596548] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.604210] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369902, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.618105] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.860s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.618105] env[62923]: ERROR nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 830121e2-d6a1-4b98-b104-48e570c31125, please check neutron logs for more information. 
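The PortBindingFailed above originates in the _ensure_no_port_binding_failure helper visible at the bottom of the traceback that follows (nova/network/neutron.py:294). A minimal, self-contained sketch of that check, assuming Neutron signals a failed binding by setting the port's binding:vif_type to 'binding_failed' (the exception message mirrors the log; this is a sketch, not a verbatim copy of Nova):

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, please check neutron "
                "logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Assumption: Neutron marks a failed binding via binding:vif_type.
        # If the port never bound to a host, the instance cannot be wired
        # up, so the build is aborted (and, as the log shows, re-scheduled).
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    if __name__ == '__main__':
        port = {'id': '830121e2-d6a1-4b98-b104-48e570c31125',
                'binding:vif_type': 'binding_failed'}
        try:
            _ensure_no_port_binding_failure(port)
        except PortBindingFailed as exc:
            print(exc)  # same message as the traceback's final line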
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Traceback (most recent call last):
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] self.driver.spawn(context, instance, image_meta,
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] vm_ref = self.build_virtual_machine(instance,
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] vif_infos = vmwarevif.get_vif_info(self._session,
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] for vif in network_info:
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] return self._sync_wrapper(fn, *args, **kwargs)
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] self.wait()
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/model.py", line 637, in wait
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] self[:] = self._gt.wait()
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] return self._exit_event.wait()
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] result = hub.switch()
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] return self.greenlet.switch()
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] result = function(*args, **kwargs)
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] return func(*args, **kwargs)
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] raise e
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] nwinfo = self.network_api.allocate_for_instance(
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] created_port_ids = self._update_ports_for_instance(
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 828.618105] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] with excutils.save_and_reraise_exception():
[ 828.619144] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 828.619144] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] self.force_reraise()
[ 828.619144] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 828.619144] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] raise self.value
[ 828.619144] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 828.619144] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] updated_port = self._update_port(
[ 828.619144] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 828.619144] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] _ensure_no_port_binding_failure(port)
[ 828.619144] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 828.619144] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] raise exception.PortBindingFailed(port_id=port['id'])
[ 828.619144] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] nova.exception.PortBindingFailed: Binding failed for port 830121e2-d6a1-4b98-b104-48e570c31125, please check neutron logs for more information.
[ 828.619144] env[62923]: ERROR nova.compute.manager [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf]
[ 828.619144] env[62923]: DEBUG nova.compute.utils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Binding failed for port 830121e2-d6a1-4b98-b104-48e570c31125, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 828.619595] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.291s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.622618] env[62923]: DEBUG nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Build of instance 83ead303-c5b9-4600-935b-fa1a77689dcf was re-scheduled: Binding failed for port 830121e2-d6a1-4b98-b104-48e570c31125, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 828.623046] env[62923]: DEBUG nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 828.623281] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "refresh_cache-83ead303-c5b9-4600-935b-fa1a77689dcf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.623425] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "refresh_cache-83ead303-c5b9-4600-935b-fa1a77689dcf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.623581] env[62923]: DEBUG nova.network.neutron [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.754893] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369901, 'name': Rename_Task, 'duration_secs': 0.36086} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.755241] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 828.755533] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f229ecdb-e6df-4f05-89a0-9b312ab852e5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.761681] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for the task: (returnval){ [ 828.761681] env[62923]: value = "task-1369903" [ 828.761681] env[62923]: _type = "Task" [ 828.761681] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.769034] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369903, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.778289] env[62923]: DEBUG nova.compute.manager [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 828.804516] env[62923]: DEBUG nova.virt.hardware [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 828.804805] env[62923]: DEBUG nova.virt.hardware [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 828.804966] env[62923]: DEBUG nova.virt.hardware [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.805202] env[62923]: DEBUG nova.virt.hardware [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 828.805354] env[62923]: DEBUG nova.virt.hardware [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.805499] env[62923]: DEBUG nova.virt.hardware [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 828.805702] env[62923]: DEBUG nova.virt.hardware [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 828.805853] env[62923]: DEBUG nova.virt.hardware [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 828.806027] env[62923]: DEBUG nova.virt.hardware [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 828.806207] env[62923]: DEBUG nova.virt.hardware [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 828.806376] env[62923]: DEBUG nova.virt.hardware [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 828.807199] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb95aa1-56cf-480a-9b52-fa36d9ab9afc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.814037] env[62923]: DEBUG nova.network.neutron [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Updating instance_info_cache with network_info: [{"id": "267bd057-e537-4d4f-a050-8b9b176c0786", "address": "fa:16:3e:eb:57:40", "network": {"id": "1267dba2-5da5-4e2c-b510-5ef72517a4e2", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1522439589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2f39b542fd245028bbb6c9f939434d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4734e5e-2a76-4bda-8905-70c9bf9e007f", "external-id": "nsx-vlan-transportzone-122", "segmentation_id": 122, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap267bd057-e5", "ovs_interfaceid": "267bd057-e537-4d4f-a050-8b9b176c0786", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.816321] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4da5a1-5e6c-4cce-9dda-806b08d82a21 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.835295] env[62923]: DEBUG oslo_concurrency.lockutils 
[req-bd38bc2f-de10-4394-81b4-945c7e890c5d req-321a1e72-5ee2-476a-abd6-f91bb92554b1 service nova] Releasing lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.106599] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369902, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069689} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.106751] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 829.107516] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362dbadb-93ae-4841-a544-522e6c2935e8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.130779] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 98974fb7-049a-4c72-a352-bc0a50d2a879/98974fb7-049a-4c72-a352-bc0a50d2a879.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 829.135401] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27dd39eb-cfbd-4945-9c0b-ce3a04abdfb1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.156917] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Waiting for the task: (returnval){ [ 829.156917] env[62923]: value = "task-1369904" [ 829.156917] env[62923]: _type = "Task" [ 829.156917] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.164927] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369904, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.170608] env[62923]: DEBUG nova.network.neutron [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.276239] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369903, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.277785] env[62923]: DEBUG nova.network.neutron [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.295836] env[62923]: DEBUG nova.compute.manager [req-f094cfe8-13a6-41d2-aa0b-1434019ec824 req-5eee7ad1-3381-4862-8d76-4ddecb43f8c2 service nova] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Received event network-changed-267bd057-e537-4d4f-a050-8b9b176c0786 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 829.296097] env[62923]: DEBUG nova.compute.manager [req-f094cfe8-13a6-41d2-aa0b-1434019ec824 req-5eee7ad1-3381-4862-8d76-4ddecb43f8c2 service nova] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Refreshing instance network info cache due to event network-changed-267bd057-e537-4d4f-a050-8b9b176c0786. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 829.296241] env[62923]: DEBUG oslo_concurrency.lockutils [req-f094cfe8-13a6-41d2-aa0b-1434019ec824 req-5eee7ad1-3381-4862-8d76-4ddecb43f8c2 service nova] Acquiring lock "refresh_cache-6fa4d8a8-093f-4ae8-9148-f15f5bf98944" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.320900] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Releasing lock "refresh_cache-6fa4d8a8-093f-4ae8-9148-f15f5bf98944" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.321236] env[62923]: DEBUG nova.compute.manager [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Instance network_info: |[{"id": "267bd057-e537-4d4f-a050-8b9b176c0786", "address": "fa:16:3e:eb:57:40", "network": {"id": "1267dba2-5da5-4e2c-b510-5ef72517a4e2", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1522439589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2f39b542fd245028bbb6c9f939434d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4734e5e-2a76-4bda-8905-70c9bf9e007f", "external-id": "nsx-vlan-transportzone-122", "segmentation_id": 122, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap267bd057-e5", "ovs_interfaceid": "267bd057-e537-4d4f-a050-8b9b176c0786", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 829.321527] env[62923]: DEBUG oslo_concurrency.lockutils 
[req-f094cfe8-13a6-41d2-aa0b-1434019ec824 req-5eee7ad1-3381-4862-8d76-4ddecb43f8c2 service nova] Acquired lock "refresh_cache-6fa4d8a8-093f-4ae8-9148-f15f5bf98944" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.321702] env[62923]: DEBUG nova.network.neutron [req-f094cfe8-13a6-41d2-aa0b-1434019ec824 req-5eee7ad1-3381-4862-8d76-4ddecb43f8c2 service nova] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Refreshing network info cache for port 267bd057-e537-4d4f-a050-8b9b176c0786 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 829.323494] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:57:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4734e5e-2a76-4bda-8905-70c9bf9e007f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '267bd057-e537-4d4f-a050-8b9b176c0786', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 829.330523] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Creating folder: Project (f2f39b542fd245028bbb6c9f939434d9). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.333859] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a192221-fd70-4a92-b26c-db4caf8c2c36 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.344234] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Created folder: Project (f2f39b542fd245028bbb6c9f939434d9) in parent group-v291405. [ 829.344418] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Creating folder: Instances. Parent ref: group-v291428. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.346857] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aea9a2f1-c119-4c3c-98fa-0503a9f309ee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.355242] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Created folder: Instances in parent group-v291428. [ 829.355463] env[62923]: DEBUG oslo.service.loopingcall [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 829.355642] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 829.355852] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5def6f09-0e0c-408b-95cd-3b31805916c8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.377621] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 829.377621] env[62923]: value = "task-1369907" [ 829.377621] env[62923]: _type = "Task" [ 829.377621] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.386623] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369907, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.408109] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56e26d9-0be4-405b-87eb-ae3a0031af23 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.414588] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a2d642-3f84-41ad-83e5-468c56d0e472 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.443898] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a2a62d-ee39-4840-bf74-af2b7673cbc7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.451997] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990bd76c-903a-4533-b38b-9c4d543078a7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.464953] env[62923]: DEBUG nova.compute.provider_tree [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.622125] env[62923]: DEBUG nova.network.neutron [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Successfully updated port: 1fb7d101-34b0-45db-b473-84c94e4b9aaa {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 829.667252] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369904, 'name': ReconfigVM_Task, 'duration_secs': 0.318678} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.667534] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 98974fb7-049a-4c72-a352-bc0a50d2a879/98974fb7-049a-4c72-a352-bc0a50d2a879.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 829.668230] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51476fb8-f589-4814-809c-5546d4b30ec2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.676317] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Waiting for the task: (returnval){ [ 829.676317] env[62923]: value = "task-1369908" [ 829.676317] env[62923]: _type = "Task" [ 829.676317] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.683438] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369908, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.775622] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369903, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.782283] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "refresh_cache-83ead303-c5b9-4600-935b-fa1a77689dcf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.782512] env[62923]: DEBUG nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 829.782715] env[62923]: DEBUG nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 829.782891] env[62923]: DEBUG nova.network.neutron [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 829.809061] env[62923]: DEBUG nova.network.neutron [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.888325] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369907, 'name': CreateVM_Task, 'duration_secs': 0.358568} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.888496] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 829.889167] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.889332] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.889645] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 829.889901] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5df499b6-3039-4a4e-8058-f4d1a90a9a10 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.894461] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Waiting for the task: (returnval){ [ 829.894461] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525a2517-fef5-ed15-7911-858b870919a6" [ 829.894461] env[62923]: _type = "Task" [ 829.894461] env[62923]: 
} to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.901815] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525a2517-fef5-ed15-7911-858b870919a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.968095] env[62923]: DEBUG nova.scheduler.client.report [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 830.125902] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.126091] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.126222] env[62923]: DEBUG nova.network.neutron [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.186531] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369908, 'name': Rename_Task, 'duration_secs': 0.154396} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.186816] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 830.187083] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9485c5ae-ebf6-496c-9df3-5d211c565e3b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.192684] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Waiting for the task: (returnval){ [ 830.192684] env[62923]: value = "task-1369909" [ 830.192684] env[62923]: _type = "Task" [ 830.192684] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.200747] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369909, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.273962] env[62923]: DEBUG oslo_vmware.api [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369903, 'name': PowerOnVM_Task, 'duration_secs': 1.336872} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.274317] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 830.274539] env[62923]: INFO nova.compute.manager [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Took 8.14 seconds to spawn the instance on the hypervisor. 
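The Task records above show oslo.vmware's polling pattern: a vSphere call such as Rename_Task or PowerOnVM_Task returns a Task managed-object reference immediately, wait_for_task (api.py:397) blocks the caller, and _poll_task (api.py:434, 444) re-reads the task's state until it reports success or error. A minimal sketch of the calling side, assuming an established oslo_vmware.api.VMwareAPISession as `session` and a VM reference `vm_ref`; the helper name power_on is illustrative, not Nova's exact code:

    from oslo_vmware import exceptions

    def power_on(session, vm_ref):
        # 'PowerOnVM_Task' returns a Task reference immediately; the
        # power-on itself runs asynchronously on the vCenter side.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        try:
            # Blocks, re-reading task.info on the session's poll
            # interval until the state is 'success' (returns the task
            # info, with duration_secs) or 'error' (raises). The
            # "progress is 0%" lines above are emitted from this loop.
            return session.wait_for_task(task)
        except exceptions.VMwareDriverException:
            # On an 'error' state the log would show the translated
            # fault here instead of "completed successfully".
            raise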
[ 830.274750] env[62923]: DEBUG nova.compute.manager [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 830.275539] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f670dc-2905-4992-87c5-be1c5fd696e7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.311943] env[62923]: DEBUG nova.network.neutron [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.353122] env[62923]: DEBUG nova.network.neutron [req-f094cfe8-13a6-41d2-aa0b-1434019ec824 req-5eee7ad1-3381-4862-8d76-4ddecb43f8c2 service nova] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Updated VIF entry in instance network info cache for port 267bd057-e537-4d4f-a050-8b9b176c0786. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 830.353501] env[62923]: DEBUG nova.network.neutron [req-f094cfe8-13a6-41d2-aa0b-1434019ec824 req-5eee7ad1-3381-4862-8d76-4ddecb43f8c2 service nova] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Updating instance_info_cache with network_info: [{"id": "267bd057-e537-4d4f-a050-8b9b176c0786", "address": "fa:16:3e:eb:57:40", "network": {"id": "1267dba2-5da5-4e2c-b510-5ef72517a4e2", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1522439589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2f39b542fd245028bbb6c9f939434d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4734e5e-2a76-4bda-8905-70c9bf9e007f", "external-id": "nsx-vlan-transportzone-122", "segmentation_id": 122, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap267bd057-e5", "ovs_interfaceid": "267bd057-e537-4d4f-a050-8b9b176c0786", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.405596] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525a2517-fef5-ed15-7911-858b870919a6, 'name': SearchDatastore_Task, 'duration_secs': 0.014257} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.405922] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.406234] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 830.406495] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.406660] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.406868] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 830.407170] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e49ab450-36de-4f52-a61f-c1dbcad4fc54 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.418302] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 830.418563] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 830.419229] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c53eb869-a665-4b58-9dc3-84e2f16b4dcf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.424783] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Waiting for the task: (returnval){ [ 830.424783] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5245ee62-b071-fff9-0b06-57a68aeebfcb" [ 830.424783] env[62923]: _type = "Task" [ 830.424783] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.432066] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5245ee62-b071-fff9-0b06-57a68aeebfcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.473517] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.854s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.474214] env[62923]: ERROR nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 40143bd4-2a73-46ca-bed1-d909b7cf967a, please check neutron logs for more information. 
[ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Traceback (most recent call last): [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] self.driver.spawn(context, instance, image_meta, [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] vm_ref = self.build_virtual_machine(instance, [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] vif_infos = vmwarevif.get_vif_info(self._session, [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] for vif in network_info: [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] return self._sync_wrapper(fn, *args, **kwargs) [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] self.wait() [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] self[:] = self._gt.wait() [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] return self._exit_event.wait() [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] result = hub.switch() [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] return self.greenlet.switch() [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] result = function(*args, **kwargs) [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] return func(*args, **kwargs) [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] raise e [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] nwinfo = self.network_api.allocate_for_instance( [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] created_port_ids = self._update_ports_for_instance( [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] with excutils.save_and_reraise_exception(): [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 830.474214] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] self.force_reraise() [ 830.475329] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 830.475329] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] raise self.value [ 830.475329] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 830.475329] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] updated_port = self._update_port( [ 830.475329] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 830.475329] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] _ensure_no_port_binding_failure(port) [ 830.475329] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 830.475329] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] raise exception.PortBindingFailed(port_id=port['id']) [ 830.475329] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] nova.exception.PortBindingFailed: Binding failed for port 40143bd4-2a73-46ca-bed1-d909b7cf967a, please check neutron logs for more information. [ 830.475329] env[62923]: ERROR nova.compute.manager [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] [ 830.475329] env[62923]: DEBUG nova.compute.utils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Binding failed for port 40143bd4-2a73-46ca-bed1-d909b7cf967a, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 830.476337] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 25.290s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.476527] env[62923]: DEBUG nova.objects.instance [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62923) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 830.479452] env[62923]: DEBUG nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Build of instance c2e48555-68b5-4ed0-8ad6-a87833538df8 was re-scheduled: Binding failed for port 40143bd4-2a73-46ca-bed1-d909b7cf967a, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 830.479899] env[62923]: DEBUG nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 830.480158] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "refresh_cache-c2e48555-68b5-4ed0-8ad6-a87833538df8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.480330] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "refresh_cache-c2e48555-68b5-4ed0-8ad6-a87833538df8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.480490] env[62923]: DEBUG nova.network.neutron [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.661974] env[62923]: DEBUG nova.network.neutron [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.703466] env[62923]: DEBUG oslo_vmware.api [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369909, 'name': PowerOnVM_Task, 'duration_secs': 0.484967} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.703714] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 830.703914] env[62923]: INFO nova.compute.manager [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Took 10.89 seconds to spawn the instance on the hypervisor. 
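The PortBindingFailed traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py, line 294 in this tree): after asking Neutron to update a port, Nova checks the returned port and aborts the build if Neutron's mechanism drivers could not bind it. A reconstruction of that check from the traceback, offered as a sketch rather than a verbatim copy of Nova's source:

    from nova import exception
    from nova.network import model as network_model

    def _ensure_no_port_binding_failure(port):
        binding_vif_type = port.get('binding:vif_type')
        # Neutron marks an unbindable port by setting
        # binding:vif_type to 'binding_failed'; surface that as a
        # hard error so the compute manager aborts the claim and
        # re-schedules the build, the sequence recorded above.
        if binding_vif_type == network_model.VIF_TYPE_BINDING_FAILED:
            raise exception.PortBindingFailed(port_id=port['id'])

Raised inside _build_and_run_instance, this exception is what drives the abort_instance_claim lock activity and the "was re-scheduled" record above.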
[ 830.704100] env[62923]: DEBUG nova.compute.manager [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 830.704928] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c2c87f-b86e-4d69-b54e-b0aa805ee550 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.799461] env[62923]: INFO nova.compute.manager [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Took 34.93 seconds to build instance. [ 830.816093] env[62923]: INFO nova.compute.manager [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 83ead303-c5b9-4600-935b-fa1a77689dcf] Took 1.03 seconds to deallocate network for instance. [ 830.839040] env[62923]: DEBUG nova.network.neutron [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance_info_cache with network_info: [{"id": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "address": "fa:16:3e:38:95:72", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb7d101-34", "ovs_interfaceid": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.859711] env[62923]: DEBUG oslo_concurrency.lockutils [req-f094cfe8-13a6-41d2-aa0b-1434019ec824 req-5eee7ad1-3381-4862-8d76-4ddecb43f8c2 service nova] Releasing lock "refresh_cache-6fa4d8a8-093f-4ae8-9148-f15f5bf98944" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.935661] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5245ee62-b071-fff9-0b06-57a68aeebfcb, 'name': SearchDatastore_Task, 'duration_secs': 0.018083} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.936532] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dcf87ff-f4f6-47d8-83ca-2811f362d665 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.941644] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Waiting for the task: (returnval){ [ 830.941644] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]524cccd5-6f72-b75d-c265-ee39cd438129" [ 830.941644] env[62923]: _type = "Task" [ 830.941644] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.949354] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]524cccd5-6f72-b75d-c265-ee39cd438129, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.002224] env[62923]: DEBUG nova.network.neutron [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.082506] env[62923]: DEBUG nova.network.neutron [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.226711] env[62923]: INFO nova.compute.manager [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Took 37.19 seconds to build instance. 
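The paired "Acquiring lock" / "acquired ... waited" / '"released" ... held' records throughout this log, including the 98974fb7-049a-4c72-a352-bc0a50d2a879 terminate sequence just below, come from oslo.concurrency: lockutils.lock() (lockutils.py:310, 313, 331) emits the bare acquire/release lines, while the synchronized decorator's inner wrapper (lockutils.py:402, 407, 421) also records which callable held the semaphore and for how long. A minimal sketch of both forms, with hypothetical function and lock names:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Decorated form: the 'inner' wrapper logs
        # 'acquired by "..." :: waited Ns' on entry and
        # '"released" by "..." :: held Ns' on exit.
        pass

    def refresh_cache(instance_uuid):
        # Context-manager form: logs the bare 'Acquiring lock' /
        # 'Releasing lock' lines (the lockutils.lock call sites).
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass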
[ 831.301247] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47652973-ba3d-426e-8529-a68e8d8e9da9 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.133s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.339382] env[62923]: DEBUG nova.compute.manager [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Received event network-vif-plugged-1fb7d101-34b0-45db-b473-84c94e4b9aaa {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 831.339652] env[62923]: DEBUG oslo_concurrency.lockutils [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] Acquiring lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.339952] env[62923]: DEBUG oslo_concurrency.lockutils [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.339987] env[62923]: DEBUG oslo_concurrency.lockutils [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.340167] env[62923]: DEBUG nova.compute.manager [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] No waiting events found dispatching network-vif-plugged-1fb7d101-34b0-45db-b473-84c94e4b9aaa {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 831.340332] env[62923]: WARNING nova.compute.manager [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Received unexpected event network-vif-plugged-1fb7d101-34b0-45db-b473-84c94e4b9aaa for instance with vm_state building and task_state spawning. [ 831.340487] env[62923]: DEBUG nova.compute.manager [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Received event network-changed-1fb7d101-34b0-45db-b473-84c94e4b9aaa {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 831.340645] env[62923]: DEBUG nova.compute.manager [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Refreshing instance network info cache due to event network-changed-1fb7d101-34b0-45db-b473-84c94e4b9aaa. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 831.340938] env[62923]: DEBUG oslo_concurrency.lockutils [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] Acquiring lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.341595] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.341889] env[62923]: DEBUG nova.compute.manager [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Instance network_info: |[{"id": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "address": "fa:16:3e:38:95:72", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb7d101-34", "ovs_interfaceid": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 831.342155] env[62923]: DEBUG oslo_concurrency.lockutils [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] Acquired lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.342329] env[62923]: DEBUG nova.network.neutron [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Refreshing network info cache for port 1fb7d101-34b0-45db-b473-84c94e4b9aaa {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.343539] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:95:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4c7a041-8e34-47f9-8ea1-d2f29414fd9d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'1fb7d101-34b0-45db-b473-84c94e4b9aaa', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.351196] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Creating folder: Project (011a5ec25af44f92961be00f82c10c08). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.354520] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-adb73e59-ebaf-42d8-a1c6-0bee7be0d24a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.368346] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Created folder: Project (011a5ec25af44f92961be00f82c10c08) in parent group-v291405. [ 831.368693] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Creating folder: Instances. Parent ref: group-v291431. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.368748] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a3cb95d-d26e-4e2b-ae06-b9bed8b2a4a8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.377446] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Created folder: Instances in parent group-v291431. [ 831.377673] env[62923]: DEBUG oslo.service.loopingcall [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 831.377854] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 831.378064] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3dcc299-d792-4990-a99f-493e286e3313 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.399198] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.399198] env[62923]: value = "task-1369912" [ 831.399198] env[62923]: _type = "Task" [ 831.399198] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.408988] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369912, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.452763] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]524cccd5-6f72-b75d-c265-ee39cd438129, 'name': SearchDatastore_Task, 'duration_secs': 0.01596} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.453045] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.453311] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 6fa4d8a8-093f-4ae8-9148-f15f5bf98944/6fa4d8a8-093f-4ae8-9148-f15f5bf98944.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 831.453565] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ece691c-da30-4535-b360-80cee7d86e30 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.459367] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Waiting for the task: (returnval){ [ 831.459367] env[62923]: value = "task-1369913" [ 831.459367] env[62923]: _type = "Task" [ 831.459367] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.467830] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369913, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.488988] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a01cb98-dd48-4b2c-bc00-e17a8cb23d37 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.490143] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.270s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.585176] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "refresh_cache-c2e48555-68b5-4ed0-8ad6-a87833538df8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.585509] env[62923]: DEBUG nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 831.585781] env[62923]: DEBUG nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 831.586167] env[62923]: DEBUG nova.network.neutron [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 831.597652] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquiring lock "98974fb7-049a-4c72-a352-bc0a50d2a879" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.614467] env[62923]: DEBUG nova.network.neutron [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.729112] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fb3811d3-d5fc-4aa9-97c1-7be7292ba590 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "98974fb7-049a-4c72-a352-bc0a50d2a879" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.292s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.730481] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "98974fb7-049a-4c72-a352-bc0a50d2a879" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.133s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.733308] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquiring lock "98974fb7-049a-4c72-a352-bc0a50d2a879-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.733308] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "98974fb7-049a-4c72-a352-bc0a50d2a879-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.733308] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "98974fb7-049a-4c72-a352-bc0a50d2a879-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.733308] env[62923]: INFO nova.compute.manager [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Terminating instance [ 831.735050] env[62923]: DEBUG nova.compute.manager [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 831.735215] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 831.736134] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68b2b71-36aa-48e8-9bda-1a06a4a4d4d9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.746012] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 831.746258] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fec28449-39a4-43a0-b6ea-a34941a24993 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.755041] env[62923]: DEBUG oslo_vmware.api [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Waiting for the task: (returnval){ [ 831.755041] env[62923]: value = "task-1369914" [ 831.755041] env[62923]: _type = "Task" [ 831.755041] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.764266] env[62923]: DEBUG oslo_vmware.api [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369914, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.771457] env[62923]: DEBUG nova.network.neutron [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updated VIF entry in instance network info cache for port 1fb7d101-34b0-45db-b473-84c94e4b9aaa. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 831.771910] env[62923]: DEBUG nova.network.neutron [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance_info_cache with network_info: [{"id": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "address": "fa:16:3e:38:95:72", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb7d101-34", "ovs_interfaceid": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.804545] env[62923]: DEBUG nova.compute.manager [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 831.853211] env[62923]: INFO nova.scheduler.client.report [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Deleted allocations for instance 83ead303-c5b9-4600-935b-fa1a77689dcf [ 831.910756] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369912, 'name': CreateVM_Task, 'duration_secs': 0.35032} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.910936] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 831.911670] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.911827] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.912180] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 831.912457] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e691af71-ca56-4059-b0c3-da419dd1898d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.918830] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 831.918830] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a83b07-9a10-b326-3a31-ee80853b48fa" [ 831.918830] env[62923]: _type = "Task" [ 831.918830] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.928366] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a83b07-9a10-b326-3a31-ee80853b48fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.969328] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369913, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50251} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.969584] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 6fa4d8a8-093f-4ae8-9148-f15f5bf98944/6fa4d8a8-093f-4ae8-9148-f15f5bf98944.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 831.969797] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 831.970085] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e993e2b4-3174-404e-8d4f-7e38ff7b76d6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.975925] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Waiting for the task: (returnval){ [ 831.975925] env[62923]: value = "task-1369915" [ 831.975925] env[62923]: _type = "Task" [ 831.975925] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.984214] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369915, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.117650] env[62923]: DEBUG nova.network.neutron [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.226920] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3400402-ea99-4fb6-995a-cc2cf112acf7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.233746] env[62923]: DEBUG nova.compute.manager [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 832.237121] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f006b080-1d76-48a7-b77f-3e51aafc7a2a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.274456] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6843c414-8057-4bf7-a5f7-2b88c3adb10e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.277868] env[62923]: DEBUG oslo_concurrency.lockutils [req-600bf2fd-b1a6-4da3-b271-fbf94d21e0b9 req-4edee3ee-6837-4024-b54a-ff079c5731ce service nova] Releasing lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.286895] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445b10c9-e414-4187-bfd7-fdab593213f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.289680] env[62923]: DEBUG oslo_vmware.api [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369914, 'name': PowerOffVM_Task, 'duration_secs': 0.270451} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.290206] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 832.290380] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 832.290913] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-97f4a71e-6543-429d-bdae-a78f78c24007 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.300599] env[62923]: DEBUG nova.compute.provider_tree [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.326854] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.362270] env[62923]: DEBUG oslo_concurrency.lockutils [None req-25b4dedc-338d-4ed2-9bac-46fb35503143 tempest-ServerDiskConfigTestJSON-92227930 
tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "83ead303-c5b9-4600-935b-fa1a77689dcf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 142.873s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.406533] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 832.406697] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 832.406961] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Deleting the datastore file [datastore1] 98974fb7-049a-4c72-a352-bc0a50d2a879 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 832.407868] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0ca8eb3-24bf-4c7f-b974-b78549e063e4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.415628] env[62923]: DEBUG oslo_vmware.api [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Waiting for the task: (returnval){ [ 832.415628] env[62923]: value = "task-1369917" [ 832.415628] env[62923]: _type = "Task" [ 832.415628] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.427446] env[62923]: DEBUG oslo_vmware.api [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369917, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.431391] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a83b07-9a10-b326-3a31-ee80853b48fa, 'name': SearchDatastore_Task, 'duration_secs': 0.023872} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.431391] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.431533] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.431716] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.431857] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.432050] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.432293] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2948fe4-5702-44f5-9c60-8a50b22cc159 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.441670] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.441854] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 832.442602] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ea39faf-4b36-4cd9-862e-0da60901b36f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.450052] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 832.450052] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b69ae6-7c8b-2b32-33cd-cc16d442da86" [ 832.450052] env[62923]: _type = "Task" [ 832.450052] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.457571] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b69ae6-7c8b-2b32-33cd-cc16d442da86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.485054] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369915, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069068} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.485213] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 832.485964] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c482b20-777a-40a2-97a4-ba016055b087 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.508867] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 6fa4d8a8-093f-4ae8-9148-f15f5bf98944/6fa4d8a8-093f-4ae8-9148-f15f5bf98944.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 832.509408] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80d9e27d-fa90-4bcb-858c-4b5541991621 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.529953] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Waiting for the task: (returnval){ [ 832.529953] env[62923]: value = "task-1369918" [ 832.529953] env[62923]: _type = "Task" [ 832.529953] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.538162] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369918, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.621101] env[62923]: INFO nova.compute.manager [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: c2e48555-68b5-4ed0-8ad6-a87833538df8] Took 1.03 seconds to deallocate network for instance. [ 832.759163] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.804194] env[62923]: DEBUG nova.scheduler.client.report [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 832.865120] env[62923]: DEBUG nova.compute.manager [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 832.876988] env[62923]: DEBUG oslo_concurrency.lockutils [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd" by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.877370] env[62923]: DEBUG oslo_concurrency.lockutils [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd" acquired by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.877633] env[62923]: INFO nova.compute.manager [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Rebooting instance [ 832.927206] env[62923]: DEBUG oslo_vmware.api [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Task: {'id': task-1369917, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.369508} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.927514] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 832.927704] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 832.927906] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 832.928191] env[62923]: INFO nova.compute.manager [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Took 1.19 seconds to destroy the instance on the hypervisor. [ 832.928371] env[62923]: DEBUG oslo.service.loopingcall [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 832.928590] env[62923]: DEBUG nova.compute.manager [-] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 832.928686] env[62923]: DEBUG nova.network.neutron [-] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 832.939609] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 832.939780] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Starting heal instance info cache {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 832.939912] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Rebuilding the list of instances to heal {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 832.960934] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b69ae6-7c8b-2b32-33cd-cc16d442da86, 'name': SearchDatastore_Task, 'duration_secs': 0.008809} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.961756] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dbf2854-aeb2-4523-b59c-bf3ccb074083 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.967430] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 832.967430] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52976543-4d30-bd7c-6a2c-5e5e5dfb7610" [ 832.967430] env[62923]: _type = "Task" [ 832.967430] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.975441] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52976543-4d30-bd7c-6a2c-5e5e5dfb7610, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.040525] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369918, 'name': ReconfigVM_Task, 'duration_secs': 0.503156} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.040826] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 6fa4d8a8-093f-4ae8-9148-f15f5bf98944/6fa4d8a8-093f-4ae8-9148-f15f5bf98944.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 833.041509] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-041a97e3-f98b-430c-b622-204f115c92c0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.047686] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Waiting for the task: (returnval){ [ 833.047686] env[62923]: value = "task-1369919" [ 833.047686] env[62923]: _type = "Task" [ 833.047686] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.055944] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369919, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.310779] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.821s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.311449] env[62923]: ERROR nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 85a59bf2-c2a3-4e88-aa11-8f784b39fada, please check neutron logs for more information. 
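[annotation] The PortBindingFailed in the traceback that follows comes from Nova's Neutron integration layer: after creating or updating a port, Nova inspects the binding:vif_type that Neutron reports back, and the sentinel value binding_failed aborts the build so the instance can be re-scheduled (as happens for instance 8a369d56 further down). Below is a minimal Python sketch of that check; the names mirror the traceback frames (_ensure_no_port_binding_failure, PortBindingFailed), but the bodies are a simplified reconstruction for illustration, not the exact Nova source.

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    # Mirrors the message format seen in the ERROR records above.
    def __init__(self, port_id):
        super().__init__(
            f'Binding failed for port {port_id}, please check neutron '
            f'logs for more information.')


def _ensure_no_port_binding_failure(port):
    # Neutron reports the outcome of port binding in 'binding:vif_type';
    # 'binding_failed' means no mechanism driver could bind the port.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port['id'])


# Example with a port dict shaped like a Neutron port-update response:
try:
    _ensure_no_port_binding_failure({
        'id': '85a59bf2-c2a3-4e88-aa11-8f784b39fada',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED,
    })
except PortBindingFailed as exc:
    print(exc)  # same message as the exception logged below
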
[ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Traceback (most recent call last): [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] self.driver.spawn(context, instance, image_meta, [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] vm_ref = self.build_virtual_machine(instance, [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] vif_infos = vmwarevif.get_vif_info(self._session, [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] for vif in network_info: [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] return self._sync_wrapper(fn, *args, **kwargs) [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] self.wait() [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] self[:] = self._gt.wait() [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] return self._exit_event.wait() [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] result = hub.switch() [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] return self.greenlet.switch() [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] result = function(*args, **kwargs) [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] return func(*args, **kwargs) [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] raise e [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] nwinfo = self.network_api.allocate_for_instance( [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] created_port_ids = self._update_ports_for_instance( [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] with excutils.save_and_reraise_exception(): [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 833.311449] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] self.force_reraise() [ 833.312309] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 833.312309] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] raise self.value [ 833.312309] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 833.312309] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] updated_port = self._update_port( [ 833.312309] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 833.312309] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] _ensure_no_port_binding_failure(port) [ 833.312309] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 833.312309] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] raise exception.PortBindingFailed(port_id=port['id']) [ 833.312309] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] nova.exception.PortBindingFailed: Binding failed for port 85a59bf2-c2a3-4e88-aa11-8f784b39fada, please check neutron logs for more information. [ 833.312309] env[62923]: ERROR nova.compute.manager [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] [ 833.312309] env[62923]: DEBUG nova.compute.utils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Binding failed for port 85a59bf2-c2a3-4e88-aa11-8f784b39fada, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 833.315377] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.146s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.318366] env[62923]: DEBUG nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Build of instance 8a369d56-8f85-4d04-ac6b-bf2eced7098f was re-scheduled: Binding failed for port 85a59bf2-c2a3-4e88-aa11-8f784b39fada, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 833.318787] env[62923]: DEBUG nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 833.319020] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Acquiring lock "refresh_cache-8a369d56-8f85-4d04-ac6b-bf2eced7098f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.319165] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Acquired lock "refresh_cache-8a369d56-8f85-4d04-ac6b-bf2eced7098f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.319317] env[62923]: DEBUG nova.network.neutron [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 833.366506] env[62923]: DEBUG nova.compute.manager [req-1537f819-f975-4ac5-81e9-77718e5f798a req-edb0dc03-f160-4378-af1a-789c173dedd5 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Received event network-changed-ba4b7bed-fcd3-414c-849f-c9687d3dd490 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 833.366754] env[62923]: DEBUG nova.compute.manager [req-1537f819-f975-4ac5-81e9-77718e5f798a req-edb0dc03-f160-4378-af1a-789c173dedd5 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Refreshing instance network info cache due to event network-changed-ba4b7bed-fcd3-414c-849f-c9687d3dd490. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 833.366894] env[62923]: DEBUG oslo_concurrency.lockutils [req-1537f819-f975-4ac5-81e9-77718e5f798a req-edb0dc03-f160-4378-af1a-789c173dedd5 service nova] Acquiring lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.367373] env[62923]: DEBUG oslo_concurrency.lockutils [req-1537f819-f975-4ac5-81e9-77718e5f798a req-edb0dc03-f160-4378-af1a-789c173dedd5 service nova] Acquired lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.367611] env[62923]: DEBUG nova.network.neutron [req-1537f819-f975-4ac5-81e9-77718e5f798a req-edb0dc03-f160-4378-af1a-789c173dedd5 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Refreshing network info cache for port ba4b7bed-fcd3-414c-849f-c9687d3dd490 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.392500] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.412656] env[62923]: DEBUG oslo_concurrency.lockutils [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.444528] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 833.444705] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Skipping network cache update for instance because it is being deleted. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 833.444920] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 833.445065] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Skipping network cache update for instance because it is Building. 
{{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 833.458887] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "refresh_cache-7c98c50a-e7c7-4430-b5c6-dec88a78c397" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.459098] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquired lock "refresh_cache-7c98c50a-e7c7-4430-b5c6-dec88a78c397" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.459189] env[62923]: DEBUG nova.network.neutron [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Forcefully refreshing network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 833.459338] env[62923]: DEBUG nova.objects.instance [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lazy-loading 'info_cache' on Instance uuid 7c98c50a-e7c7-4430-b5c6-dec88a78c397 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 833.479496] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52976543-4d30-bd7c-6a2c-5e5e5dfb7610, 'name': SearchDatastore_Task, 'duration_secs': 0.031676} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.479744] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.479986] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 1fef5eb2-acb0-4d00-81a3-c270af7df0e8/1fef5eb2-acb0-4d00-81a3-c270af7df0e8.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 833.480328] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-282aaa78-d3f4-464f-9d37-1caa2dfd04c3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.486025] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 833.486025] env[62923]: value = "task-1369920" [ 833.486025] env[62923]: _type = "Task" [ 833.486025] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.493422] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1369920, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.523583] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "066da19f-daf0-44e3-8ae0-89f0c970cb92" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.523812] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "066da19f-daf0-44e3-8ae0-89f0c970cb92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.557135] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369919, 'name': Rename_Task, 'duration_secs': 0.145505} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.557404] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 833.557640] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-353e461a-e4d1-45ca-9c1f-3fdd00d11a30 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.564584] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Waiting for the task: (returnval){ [ 833.564584] env[62923]: value = "task-1369921" [ 833.564584] env[62923]: _type = "Task" [ 833.564584] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.577530] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369921, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.656343] env[62923]: INFO nova.scheduler.client.report [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Deleted allocations for instance c2e48555-68b5-4ed0-8ad6-a87833538df8 [ 833.841701] env[62923]: DEBUG nova.network.neutron [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.914029] env[62923]: DEBUG nova.network.neutron [-] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.002077] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1369920, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.040952] env[62923]: DEBUG nova.network.neutron [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.084358] env[62923]: DEBUG oslo_vmware.api [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369921, 'name': PowerOnVM_Task, 'duration_secs': 0.492699} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.085898] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 834.087761] env[62923]: INFO nova.compute.manager [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Took 7.82 seconds to spawn the instance on the hypervisor. 
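[annotation] The repeated "Task: {...} progress is N%" records throughout this log come from oslo.vmware's wait_for_task()/_poll_task loop: the driver submits a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task, ...) and then polls the task's info property until it reaches a terminal state. Below is a minimal Python sketch of that loop; get_task_info is a hypothetical callable standing in for the PropertyCollector round trips (RetrievePropertiesEx) logged above, and this is a simplified illustration, not the oslo.vmware source.

import time


def wait_for_task(get_task_info, task_ref, interval=0.5):
    # Poll the vCenter task until it finishes (simplified sketch).
    # get_task_info(task_ref) is assumed to return a dict with the
    # task's 'state', optional 'result'/'error', and 'progress'.
    while True:
        info = get_task_info(task_ref)
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # 'queued' or 'running': report progress and poll again, which
        # is what produces the "progress is N%" lines in this log.
        print("Task: %s progress is %s%%"
              % (task_ref, info.get('progress', 0)))
        time.sleep(interval)


# Example with a stub that succeeds on the first poll:
print(wait_for_task(lambda ref: {'state': 'success', 'result': ref},
                    'task-1369921'))
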
[ 834.087761] env[62923]: DEBUG nova.compute.manager [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 834.088836] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59108be-5c0f-4538-ae42-924047830ae0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.118406] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2feef7-0bef-43a3-832b-9f87cd72eaab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.129949] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d0e7d7-c81d-45df-ad0c-3b4aee18891c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.164108] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bac9eb-b9ba-4897-ba91-3101cd8b1e7c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.166839] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b8e38c09-52ed-45e0-aca4-635767c9679d tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "c2e48555-68b5-4ed0-8ad6-a87833538df8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 141.294s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.174098] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4efbe0-04ef-4fee-996e-d3a5c9edf3b4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.191567] env[62923]: DEBUG nova.compute.provider_tree [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.307140] env[62923]: DEBUG nova.network.neutron [req-1537f819-f975-4ac5-81e9-77718e5f798a req-edb0dc03-f160-4378-af1a-789c173dedd5 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Updated VIF entry in instance network info cache for port ba4b7bed-fcd3-414c-849f-c9687d3dd490. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 834.307519] env[62923]: DEBUG nova.network.neutron [req-1537f819-f975-4ac5-81e9-77718e5f798a req-edb0dc03-f160-4378-af1a-789c173dedd5 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Updating instance_info_cache with network_info: [{"id": "ba4b7bed-fcd3-414c-849f-c9687d3dd490", "address": "fa:16:3e:8a:0d:06", "network": {"id": "4f1191c4-7834-47c1-9c12-06d257cf913f", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-828952864-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d013513ad708456f9a827c8d4974beec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba4b7bed-fc", "ovs_interfaceid": "ba4b7bed-fcd3-414c-849f-c9687d3dd490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.418301] env[62923]: INFO nova.compute.manager [-] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Took 1.49 seconds to deallocate network for instance. [ 834.487266] env[62923]: DEBUG nova.network.neutron [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 834.496869] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1369920, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.696757} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.498073] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 1fef5eb2-acb0-4d00-81a3-c270af7df0e8/1fef5eb2-acb0-4d00-81a3-c270af7df0e8.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 834.498073] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 834.498186] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc78aad2-fc4f-4330-b043-cd769e662086 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.505017] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 834.505017] env[62923]: value = "task-1369922" [ 834.505017] env[62923]: _type = "Task" [ 834.505017] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.512540] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1369922, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.543291] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Releasing lock "refresh_cache-8a369d56-8f85-4d04-ac6b-bf2eced7098f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.543558] env[62923]: DEBUG nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 834.543779] env[62923]: DEBUG nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 834.543966] env[62923]: DEBUG nova.network.neutron [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 834.562891] env[62923]: DEBUG nova.network.neutron [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 834.612893] env[62923]: INFO nova.compute.manager [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Took 35.95 seconds to build instance. [ 834.670127] env[62923]: DEBUG nova.compute.manager [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 834.694249] env[62923]: DEBUG nova.scheduler.client.report [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 834.810171] env[62923]: DEBUG oslo_concurrency.lockutils [req-1537f819-f975-4ac5-81e9-77718e5f798a req-edb0dc03-f160-4378-af1a-789c173dedd5 service nova] Releasing lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.810459] env[62923]: DEBUG nova.compute.manager [req-1537f819-f975-4ac5-81e9-77718e5f798a req-edb0dc03-f160-4378-af1a-789c173dedd5 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Received event network-vif-deleted-6785022c-2ac9-4a61-ad21-298adb7ba096 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 834.810630] env[62923]: INFO nova.compute.manager [req-1537f819-f975-4ac5-81e9-77718e5f798a req-edb0dc03-f160-4378-af1a-789c173dedd5 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Neutron deleted interface 
6785022c-2ac9-4a61-ad21-298adb7ba096; detaching it from the instance and deleting it from the info cache [ 834.810880] env[62923]: DEBUG nova.network.neutron [req-1537f819-f975-4ac5-81e9-77718e5f798a req-edb0dc03-f160-4378-af1a-789c173dedd5 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Updating instance_info_cache with network_info: [{"id": "eb88e129-2421-443f-81c0-c94079f5d2a9", "address": "fa:16:3e:12:35:64", "network": {"id": "7f8e422e-98c2-4d7f-bd63-088b1021ad33", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1058654876", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.41", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "eb4fde1412c240b288e7337a06fae728", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb88e129-24", "ovs_interfaceid": "eb88e129-2421-443f-81c0-c94079f5d2a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.812100] env[62923]: DEBUG oslo_concurrency.lockutils [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquired lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.812281] env[62923]: DEBUG nova.network.neutron [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 834.925985] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.015824] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1369922, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066029} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.016144] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.016874] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0847b60b-cf85-4812-85d7-8ba5fc569667 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.038800] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 1fef5eb2-acb0-4d00-81a3-c270af7df0e8/1fef5eb2-acb0-4d00-81a3-c270af7df0e8.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.039076] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-022c444f-a8c9-40c7-8a2f-277fe86bde56 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.058306] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 835.058306] env[62923]: value = "task-1369923" [ 835.058306] env[62923]: _type = "Task" [ 835.058306] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.065847] env[62923]: DEBUG nova.network.neutron [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.067075] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1369923, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.077693] env[62923]: DEBUG nova.network.neutron [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.114860] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a6f077d5-12f1-45eb-b17b-cec4e5945b54 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Lock "6fa4d8a8-093f-4ae8-9148-f15f5bf98944" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.561s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.197698] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.198469] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.883s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.199180] env[62923]: ERROR nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bdd6a040-f201-4806-8fa8-86008708d23c, please check neutron logs for more information. 
[ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Traceback (most recent call last): [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] self.driver.spawn(context, instance, image_meta, [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] self._vmops.spawn(context, instance, image_meta, injected_files, [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] vm_ref = self.build_virtual_machine(instance, [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] vif_infos = vmwarevif.get_vif_info(self._session, [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] for vif in network_info: [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/model.py", line 614, in __iter__ [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] return self._sync_wrapper(fn, *args, **kwargs) [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/model.py", line 605, in _sync_wrapper [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] self.wait() [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/model.py", line 637, in wait [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] self[:] = self._gt.wait() [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] return self._exit_event.wait() [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] result = hub.switch() [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] return self.greenlet.switch() [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] result = function(*args, **kwargs) [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] return func(*args, **kwargs) [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] raise e [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] nwinfo = self.network_api.allocate_for_instance( [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] created_port_ids = self._update_ports_for_instance( [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] with excutils.save_and_reraise_exception(): [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 835.199180] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] self.force_reraise() [ 835.200230] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 835.200230] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] raise self.value [ 835.200230] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 835.200230] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] updated_port = self._update_port( [ 835.200230] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 835.200230] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] _ensure_no_port_binding_failure(port) [ 835.200230] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 835.200230] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] raise exception.PortBindingFailed(port_id=port['id']) [ 835.200230] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] nova.exception.PortBindingFailed: Binding failed for port bdd6a040-f201-4806-8fa8-86008708d23c, please check neutron logs for more information. [ 835.200230] env[62923]: ERROR nova.compute.manager [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] [ 835.200230] env[62923]: DEBUG nova.compute.utils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Binding failed for port bdd6a040-f201-4806-8fa8-86008708d23c, please check neutron logs for more information. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 835.201380] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.254s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.201596] env[62923]: DEBUG nova.objects.instance [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Lazy-loading 'resources' on Instance uuid 7c98c50a-e7c7-4430-b5c6-dec88a78c397 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 835.202813] env[62923]: DEBUG nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Build of instance 08d39755-f94c-45aa-bfb5-f179e8a370db was re-scheduled: Binding failed for port bdd6a040-f201-4806-8fa8-86008708d23c, please check neutron logs for more information. 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 835.203245] env[62923]: DEBUG nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Unplugging VIFs for instance {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 835.203463] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Acquiring lock "refresh_cache-08d39755-f94c-45aa-bfb5-f179e8a370db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.203607] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Acquired lock "refresh_cache-08d39755-f94c-45aa-bfb5-f179e8a370db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.203761] env[62923]: DEBUG nova.network.neutron [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 835.315033] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb50b914-fcf0-481f-b8ca-fa5d57fc38c3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.326474] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dec69a7-27a2-480f-bf01-9ec831dee338 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.350729] env[62923]: DEBUG nova.compute.manager [req-1537f819-f975-4ac5-81e9-77718e5f798a req-edb0dc03-f160-4378-af1a-789c173dedd5 service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Detach interface failed, port_id=6785022c-2ac9-4a61-ad21-298adb7ba096, reason: Instance 98974fb7-049a-4c72-a352-bc0a50d2a879 could not be found. 
{{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 835.397825] env[62923]: DEBUG nova.compute.manager [req-03cb7ffe-5913-4b01-92a0-7013d72ee61a req-89f362a5-04fa-4162-9755-000d8377887d service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Received event network-vif-deleted-eb88e129-2421-443f-81c0-c94079f5d2a9 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.398034] env[62923]: INFO nova.compute.manager [req-03cb7ffe-5913-4b01-92a0-7013d72ee61a req-89f362a5-04fa-4162-9755-000d8377887d service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Neutron deleted interface eb88e129-2421-443f-81c0-c94079f5d2a9; detaching it from the instance and deleting it from the info cache [ 835.398208] env[62923]: DEBUG nova.network.neutron [req-03cb7ffe-5913-4b01-92a0-7013d72ee61a req-89f362a5-04fa-4162-9755-000d8377887d service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.568965] env[62923]: INFO nova.compute.manager [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] [instance: 8a369d56-8f85-4d04-ac6b-bf2eced7098f] Took 1.02 seconds to deallocate network for instance. [ 835.571631] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1369923, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.580372] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Releasing lock "refresh_cache-7c98c50a-e7c7-4430-b5c6-dec88a78c397" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.580607] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Updated the network info_cache for instance {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 835.619531] env[62923]: DEBUG nova.compute.manager [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 835.668545] env[62923]: DEBUG nova.network.neutron [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Updating instance_info_cache with network_info: [{"id": "ba4b7bed-fcd3-414c-849f-c9687d3dd490", "address": "fa:16:3e:8a:0d:06", "network": {"id": "4f1191c4-7834-47c1-9c12-06d257cf913f", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-828952864-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d013513ad708456f9a827c8d4974beec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba4b7bed-fc", "ovs_interfaceid": "ba4b7bed-fcd3-414c-849f-c9687d3dd490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.724106] env[62923]: DEBUG nova.network.neutron [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.824278] env[62923]: DEBUG nova.network.neutron [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.902892] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-afae0003-ce8c-42fa-be12-0348c0a4a0ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.911984] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e018b9dd-a28f-4706-9413-cc39aab9e4de {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.939386] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 835.939729] env[62923]: DEBUG nova.compute.manager [req-03cb7ffe-5913-4b01-92a0-7013d72ee61a req-89f362a5-04fa-4162-9755-000d8377887d service nova] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Detach interface failed, port_id=eb88e129-2421-443f-81c0-c94079f5d2a9, reason: Instance 98974fb7-049a-4c72-a352-bc0a50d2a879 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 835.940931] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 835.941146] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62923) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 835.941852] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37d1a0b-2c5a-483c-ad0e-ff5c90bf98a4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.948488] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8edea4-d95e-4714-b7a8-43bf2775ac9f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.954638] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Acquiring lock "6fa4d8a8-093f-4ae8-9148-f15f5bf98944" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.954891] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Lock "6fa4d8a8-093f-4ae8-9148-f15f5bf98944" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.955107] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Acquiring lock "6fa4d8a8-093f-4ae8-9148-f15f5bf98944-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.955319] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Lock "6fa4d8a8-093f-4ae8-9148-f15f5bf98944-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.955450] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Lock "6fa4d8a8-093f-4ae8-9148-f15f5bf98944-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.981644] env[62923]: INFO nova.compute.manager [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Terminating instance [ 835.983688] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec27606-b5a0-46e5-88cd-94a45f4617ec {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.986489] env[62923]: DEBUG nova.compute.manager [None 
req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 835.986677] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 835.987455] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ab59bd-d6ea-4441-a7ff-7bf1a57ee83b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.996076] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d63795-3682-4b7e-aebd-7ed6abd3d687 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.999604] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 835.999813] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3297ad76-e6a6-41e9-acc6-a1e3a19c55d2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.010387] env[62923]: DEBUG nova.compute.provider_tree [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.012687] env[62923]: DEBUG oslo_vmware.api [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Waiting for the task: (returnval){ [ 836.012687] env[62923]: value = "task-1369924" [ 836.012687] env[62923]: _type = "Task" [ 836.012687] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.020112] env[62923]: DEBUG oslo_vmware.api [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369924, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.070664] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1369923, 'name': ReconfigVM_Task, 'duration_secs': 0.616243} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.070977] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 1fef5eb2-acb0-4d00-81a3-c270af7df0e8/1fef5eb2-acb0-4d00-81a3-c270af7df0e8.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.071699] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b6ed2fd-a8d5-4877-aabc-e85a7512d4ab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.085336] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 836.085336] env[62923]: value = "task-1369925" [ 836.085336] env[62923]: _type = "Task" [ 836.085336] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.096360] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1369925, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.143280] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.172442] env[62923]: DEBUG oslo_concurrency.lockutils [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Releasing lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.174592] env[62923]: DEBUG nova.compute.manager [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 836.175624] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db25d1d-8972-42d3-8f93-34fb38fd7a2b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.326866] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Releasing lock "refresh_cache-08d39755-f94c-45aa-bfb5-f179e8a370db" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.326983] env[62923]: DEBUG nova.compute.manager [None 
req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62923) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 836.327136] env[62923]: DEBUG nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 836.327311] env[62923]: DEBUG nova.network.neutron [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 836.349124] env[62923]: DEBUG nova.network.neutron [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 836.514138] env[62923]: DEBUG nova.scheduler.client.report [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 836.528107] env[62923]: DEBUG oslo_vmware.api [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369924, 'name': PowerOffVM_Task, 'duration_secs': 0.19643} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.528755] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 836.528755] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 836.529545] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c17516b-fbf0-4836-a725-b6153a59a037 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.591233] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 836.591502] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 836.591623] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Deleting the datastore file [datastore1] 6fa4d8a8-093f-4ae8-9148-f15f5bf98944 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 836.592245] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43280804-0fb2-412d-80ce-604511017319 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.599234] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1369925, 'name': Rename_Task, 'duration_secs': 0.164908} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.600423] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 836.600768] env[62923]: DEBUG oslo_vmware.api [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Waiting for the task: (returnval){ [ 836.600768] env[62923]: value = "task-1369927" [ 836.600768] env[62923]: _type = "Task" [ 836.600768] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.601592] env[62923]: INFO nova.scheduler.client.report [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Deleted allocations for instance 8a369d56-8f85-4d04-ac6b-bf2eced7098f [ 836.606858] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6137b9ff-c32f-43af-8fa3-e5a38ab48913 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.621851] env[62923]: DEBUG oslo_vmware.api [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369927, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.621851] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 836.621851] env[62923]: value = "task-1369928" [ 836.621851] env[62923]: _type = "Task" [ 836.621851] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.630053] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1369928, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.852500] env[62923]: DEBUG nova.network.neutron [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.943382] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 836.943681] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.022070] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.820s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.024017] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.627s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.025949] env[62923]: INFO nova.compute.claims [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 837.038883] env[62923]: INFO nova.scheduler.client.report [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Deleted allocations for instance 7c98c50a-e7c7-4430-b5c6-dec88a78c397 [ 837.114429] env[62923]: DEBUG oslo_concurrency.lockutils [None req-75a03cd2-0e1e-4695-9d18-c34e1290e69e tempest-ServerRescueTestJSONUnderV235-1480207601 tempest-ServerRescueTestJSONUnderV235-1480207601-project-member] Lock "8a369d56-8f85-4d04-ac6b-bf2eced7098f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.987s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.119965] env[62923]: DEBUG oslo_vmware.api [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Task: {'id': task-1369927, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.21827} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.120565] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 837.120565] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 837.120650] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 837.120836] env[62923]: INFO nova.compute.manager [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Took 1.13 seconds to destroy the instance on the hypervisor. [ 837.120990] env[62923]: DEBUG oslo.service.loopingcall [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.121198] env[62923]: DEBUG nova.compute.manager [-] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 837.121287] env[62923]: DEBUG nova.network.neutron [-] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.131987] env[62923]: DEBUG oslo_vmware.api [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1369928, 'name': PowerOnVM_Task, 'duration_secs': 0.498178} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.132266] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 837.132465] env[62923]: INFO nova.compute.manager [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Took 8.35 seconds to spawn the instance on the hypervisor. [ 837.132646] env[62923]: DEBUG nova.compute.manager [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 837.133474] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7b9bcb-a51d-4ebf-83d5-fb4a5a530130 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.151682] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.151899] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.191814] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1841414-f6aa-441d-9fb0-00cae8dee5aa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.202271] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-97b55061-b96c-487d-be8b-661f1f850d1e 
tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Doing hard reboot of VM {{(pid=62923) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 837.202271] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-16338660-5d8d-4e4e-998b-efa95dad0fbb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.208142] env[62923]: DEBUG oslo_vmware.api [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for the task: (returnval){ [ 837.208142] env[62923]: value = "task-1369929" [ 837.208142] env[62923]: _type = "Task" [ 837.208142] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.219563] env[62923]: DEBUG oslo_vmware.api [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369929, 'name': ResetVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.355278] env[62923]: INFO nova.compute.manager [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] [instance: 08d39755-f94c-45aa-bfb5-f179e8a370db] Took 1.03 seconds to deallocate network for instance. [ 837.493606] env[62923]: DEBUG nova.compute.manager [req-c6573939-600d-497f-b190-51d8c79b615b req-2e062755-9f06-472d-989f-3a0589b8ab5f service nova] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Received event network-vif-deleted-267bd057-e537-4d4f-a050-8b9b176c0786 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 837.493606] env[62923]: INFO nova.compute.manager [req-c6573939-600d-497f-b190-51d8c79b615b req-2e062755-9f06-472d-989f-3a0589b8ab5f service nova] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Neutron deleted interface 267bd057-e537-4d4f-a050-8b9b176c0786; detaching it from the instance and deleting it from the info cache [ 837.493838] env[62923]: DEBUG nova.network.neutron [req-c6573939-600d-497f-b190-51d8c79b615b req-2e062755-9f06-472d-989f-3a0589b8ab5f service nova] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.546818] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a081a3de-7d59-4a97-b480-6a8056237dd5 tempest-ServerShowV254Test-1899134332 tempest-ServerShowV254Test-1899134332-project-member] Lock "7c98c50a-e7c7-4430-b5c6-dec88a78c397" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.383s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.616976] env[62923]: DEBUG nova.compute.manager [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 837.650504] env[62923]: INFO nova.compute.manager [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Took 37.59 seconds to build instance. [ 837.718382] env[62923]: DEBUG oslo_vmware.api [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369929, 'name': ResetVM_Task, 'duration_secs': 0.084048} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.718646] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Did hard reboot of VM {{(pid=62923) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 837.718873] env[62923]: DEBUG nova.compute.manager [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 837.719625] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5383f2d5-9106-4b0f-8e7d-9148ee1381f9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.931917] env[62923]: DEBUG nova.network.neutron [-] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.939999] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.940172] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.940801] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.999434] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dbae3877-a41b-4d1e-af77-9e82853d26c7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.009779] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e366e026-419f-476c-b2c3-37c8a6d24d89 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.035228] env[62923]: DEBUG nova.compute.manager [req-c6573939-600d-497f-b190-51d8c79b615b req-2e062755-9f06-472d-989f-3a0589b8ab5f service nova] 
[instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Detach interface failed, port_id=267bd057-e537-4d4f-a050-8b9b176c0786, reason: Instance 6fa4d8a8-093f-4ae8-9148-f15f5bf98944 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 838.144799] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.153213] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e4d8f1ba-1dfd-4e53-a198-815ec8362a45 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.682s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.241297] env[62923]: DEBUG oslo_concurrency.lockutils [None req-97b55061-b96c-487d-be8b-661f1f850d1e tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.364s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.296216] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef253280-20a5-48eb-aa6a-a44950e1901e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.304686] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687cc3db-4dc3-4d78-9c1c-b3fcb236d3a6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.336511] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f6b9b3-4859-4e9a-be65-742d3f01aeb1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.343784] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a2c0c7-aa21-4389-94fc-88d74d3d0697 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.356712] env[62923]: DEBUG nova.compute.provider_tree [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.395168] env[62923]: INFO nova.scheduler.client.report [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Deleted allocations for instance 08d39755-f94c-45aa-bfb5-f179e8a370db [ 838.433944] env[62923]: INFO nova.compute.manager [-] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Took 1.31 seconds to deallocate network for 
instance. [ 838.443833] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.659103] env[62923]: DEBUG nova.compute.manager [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 838.868161] env[62923]: DEBUG nova.scheduler.client.report [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 838.903682] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0a56c12e-aef1-4f3c-a985-d8cd8f15d638 tempest-ServersTestFqdnHostnames-405385759 tempest-ServersTestFqdnHostnames-405385759-project-member] Lock "08d39755-f94c-45aa-bfb5-f179e8a370db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.456s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.940607] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.195292] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.375275] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.351s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.375793] env[62923]: DEBUG nova.compute.manager [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 839.379146] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.730s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.380019] env[62923]: INFO nova.compute.claims [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 839.406600] env[62923]: DEBUG nova.compute.manager [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 839.527897] env[62923]: DEBUG nova.compute.manager [req-2822da0b-6576-4073-8d6a-cd82e0fe1e3d req-dc109734-7d95-4e9a-ba22-3ffef10e8bf3 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Received event network-changed-ba4b7bed-fcd3-414c-849f-c9687d3dd490 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 839.528094] env[62923]: DEBUG nova.compute.manager [req-2822da0b-6576-4073-8d6a-cd82e0fe1e3d req-dc109734-7d95-4e9a-ba22-3ffef10e8bf3 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Refreshing instance network info cache due to event network-changed-ba4b7bed-fcd3-414c-849f-c9687d3dd490. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 839.528307] env[62923]: DEBUG oslo_concurrency.lockutils [req-2822da0b-6576-4073-8d6a-cd82e0fe1e3d req-dc109734-7d95-4e9a-ba22-3ffef10e8bf3 service nova] Acquiring lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.528443] env[62923]: DEBUG oslo_concurrency.lockutils [req-2822da0b-6576-4073-8d6a-cd82e0fe1e3d req-dc109734-7d95-4e9a-ba22-3ffef10e8bf3 service nova] Acquired lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.528591] env[62923]: DEBUG nova.network.neutron [req-2822da0b-6576-4073-8d6a-cd82e0fe1e3d req-dc109734-7d95-4e9a-ba22-3ffef10e8bf3 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Refreshing network info cache for port ba4b7bed-fcd3-414c-849f-c9687d3dd490 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.687445] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.688523] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.688523] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.688523] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.688523] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.694116] env[62923]: INFO nova.compute.manager [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 
tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Terminating instance [ 839.696230] env[62923]: DEBUG nova.compute.manager [req-0710a87c-ffb7-4eb3-9317-c428466dedb6 req-fbee0f00-5e75-467d-93d4-cfb754b519e4 service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Received event network-changed-1fb7d101-34b0-45db-b473-84c94e4b9aaa {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 839.696381] env[62923]: DEBUG nova.compute.manager [req-0710a87c-ffb7-4eb3-9317-c428466dedb6 req-fbee0f00-5e75-467d-93d4-cfb754b519e4 service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Refreshing instance network info cache due to event network-changed-1fb7d101-34b0-45db-b473-84c94e4b9aaa. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 839.697124] env[62923]: DEBUG oslo_concurrency.lockutils [req-0710a87c-ffb7-4eb3-9317-c428466dedb6 req-fbee0f00-5e75-467d-93d4-cfb754b519e4 service nova] Acquiring lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.697124] env[62923]: DEBUG oslo_concurrency.lockutils [req-0710a87c-ffb7-4eb3-9317-c428466dedb6 req-fbee0f00-5e75-467d-93d4-cfb754b519e4 service nova] Acquired lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.697124] env[62923]: DEBUG nova.network.neutron [req-0710a87c-ffb7-4eb3-9317-c428466dedb6 req-fbee0f00-5e75-467d-93d4-cfb754b519e4 service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Refreshing network info cache for port 1fb7d101-34b0-45db-b473-84c94e4b9aaa {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.699136] env[62923]: DEBUG nova.compute.manager [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 839.699349] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 839.700196] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8a62cf-bda2-458f-b1ad-dc2e18a89bbc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.708428] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 839.708670] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6081fc31-f928-41d7-8ace-f30c3763637e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.716766] env[62923]: DEBUG oslo_vmware.api [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for the task: (returnval){ [ 839.716766] env[62923]: value = "task-1369930" [ 839.716766] env[62923]: _type = "Task" [ 839.716766] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.725780] env[62923]: DEBUG oslo_vmware.api [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369930, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.890149] env[62923]: DEBUG nova.compute.utils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 839.890149] env[62923]: DEBUG nova.compute.manager [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 839.890348] env[62923]: DEBUG nova.network.neutron [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 839.949650] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.986860] env[62923]: DEBUG nova.policy [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab919c24ddc84cdc92dd547d468fd45b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c1729b948504ceb92f67c7e1a21ca21', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 840.227865] env[62923]: DEBUG oslo_vmware.api [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369930, 'name': PowerOffVM_Task, 'duration_secs': 0.273042} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.228180] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 840.228353] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 840.229661] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2ec35ec-acae-4a03-819b-86333c135779 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.291777] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 840.292150] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 840.292239] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Deleting the datastore file [datastore1] 81cca322-c1a0-4fbd-8013-0e4a4694ecfd {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 840.292738] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f88dab8f-ef96-4a09-9240-bbcf8aaf2ead {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.300248] env[62923]: DEBUG oslo_vmware.api [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for the task: (returnval){ [ 840.300248] env[62923]: value = "task-1369932" [ 840.300248] env[62923]: _type = "Task" [ 840.300248] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.313218] env[62923]: DEBUG oslo_vmware.api [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369932, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.394399] env[62923]: DEBUG nova.compute.manager [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 840.623797] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89c33de-b6de-4b96-8f1b-04028e72c437 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.634493] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac9c207-094e-4443-99ce-e59a8c20c292 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.672035] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868ac7fc-1b90-42bd-8d40-80eca6b61b70 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.680179] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53854131-b08d-4ce2-b671-ec79ea91f3c7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.694645] env[62923]: DEBUG nova.compute.provider_tree [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.777420] env[62923]: DEBUG nova.network.neutron [req-2822da0b-6576-4073-8d6a-cd82e0fe1e3d req-dc109734-7d95-4e9a-ba22-3ffef10e8bf3 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Updated VIF entry in instance network info cache for port ba4b7bed-fcd3-414c-849f-c9687d3dd490. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 840.778100] env[62923]: DEBUG nova.network.neutron [req-2822da0b-6576-4073-8d6a-cd82e0fe1e3d req-dc109734-7d95-4e9a-ba22-3ffef10e8bf3 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Updating instance_info_cache with network_info: [{"id": "ba4b7bed-fcd3-414c-849f-c9687d3dd490", "address": "fa:16:3e:8a:0d:06", "network": {"id": "4f1191c4-7834-47c1-9c12-06d257cf913f", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-828952864-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d013513ad708456f9a827c8d4974beec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba4b7bed-fc", "ovs_interfaceid": "ba4b7bed-fcd3-414c-849f-c9687d3dd490", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.811958] env[62923]: DEBUG oslo_vmware.api [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369932, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.922582] env[62923]: DEBUG nova.network.neutron [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Successfully created port: 7e696989-aebf-4820-8c47-eadc1b780de7 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 841.016555] env[62923]: DEBUG nova.network.neutron [req-0710a87c-ffb7-4eb3-9317-c428466dedb6 req-fbee0f00-5e75-467d-93d4-cfb754b519e4 service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updated VIF entry in instance network info cache for port 1fb7d101-34b0-45db-b473-84c94e4b9aaa. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 841.016902] env[62923]: DEBUG nova.network.neutron [req-0710a87c-ffb7-4eb3-9317-c428466dedb6 req-fbee0f00-5e75-467d-93d4-cfb754b519e4 service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance_info_cache with network_info: [{"id": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "address": "fa:16:3e:38:95:72", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb7d101-34", "ovs_interfaceid": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.198523] env[62923]: DEBUG nova.scheduler.client.report [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 841.283680] env[62923]: DEBUG oslo_concurrency.lockutils [req-2822da0b-6576-4073-8d6a-cd82e0fe1e3d req-dc109734-7d95-4e9a-ba22-3ffef10e8bf3 service nova] Releasing lock "refresh_cache-81cca322-c1a0-4fbd-8013-0e4a4694ecfd" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.310918] env[62923]: DEBUG oslo_vmware.api [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Task: {'id': task-1369932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.673179} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.311189] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 841.311372] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 841.311962] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 841.311962] env[62923]: INFO nova.compute.manager [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Took 1.61 seconds to destroy the instance on the hypervisor. [ 841.311962] env[62923]: DEBUG oslo.service.loopingcall [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 841.312173] env[62923]: DEBUG nova.compute.manager [-] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 841.312293] env[62923]: DEBUG nova.network.neutron [-] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 841.406634] env[62923]: DEBUG nova.compute.manager [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 841.438165] env[62923]: DEBUG nova.virt.hardware [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 841.438165] env[62923]: DEBUG nova.virt.hardware [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 841.438309] env[62923]: DEBUG nova.virt.hardware [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 841.438482] env[62923]: DEBUG nova.virt.hardware [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 841.441567] env[62923]: DEBUG nova.virt.hardware [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 841.441567] env[62923]: DEBUG nova.virt.hardware [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 841.441567] env[62923]: DEBUG nova.virt.hardware [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 841.441567] env[62923]: DEBUG nova.virt.hardware [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 841.441567] env[62923]: DEBUG nova.virt.hardware [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 841.441567] env[62923]: DEBUG nova.virt.hardware [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 841.441567] env[62923]: DEBUG nova.virt.hardware [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 841.441567] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf4f958-0bc2-4059-b3ee-7c2ebcd9d05e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.448977] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528a8362-7288-454a-b916-7417bfc3a19c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.522898] env[62923]: DEBUG oslo_concurrency.lockutils [req-0710a87c-ffb7-4eb3-9317-c428466dedb6 req-fbee0f00-5e75-467d-93d4-cfb754b519e4 service nova] Releasing lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.645256] env[62923]: DEBUG nova.compute.manager [req-b348d699-9320-4d3c-989c-42b6efb0c809 req-fbf92b74-c3c3-4669-a0f8-4c1178696525 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Received event network-vif-deleted-ba4b7bed-fcd3-414c-849f-c9687d3dd490 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.645343] env[62923]: INFO nova.compute.manager [req-b348d699-9320-4d3c-989c-42b6efb0c809 req-fbf92b74-c3c3-4669-a0f8-4c1178696525 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Neutron deleted interface ba4b7bed-fcd3-414c-849f-c9687d3dd490; detaching it from the instance and deleting it from the info cache [ 841.645503] env[62923]: DEBUG nova.network.neutron [req-b348d699-9320-4d3c-989c-42b6efb0c809 req-fbf92b74-c3c3-4669-a0f8-4c1178696525 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.707863] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.329s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.708403] env[62923]: DEBUG nova.compute.manager [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 
tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 841.711832] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.385s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.713398] env[62923]: INFO nova.compute.claims [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 842.037202] env[62923]: DEBUG nova.network.neutron [-] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.150326] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44e6ca01-6ec4-4465-863e-54f580edc13c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.161320] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81ca2f3-c85d-4013-b832-330db7b5233d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.186398] env[62923]: DEBUG nova.compute.manager [req-b348d699-9320-4d3c-989c-42b6efb0c809 req-fbf92b74-c3c3-4669-a0f8-4c1178696525 service nova] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Detach interface failed, port_id=ba4b7bed-fcd3-414c-849f-c9687d3dd490, reason: Instance 81cca322-c1a0-4fbd-8013-0e4a4694ecfd could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 842.214686] env[62923]: DEBUG nova.compute.utils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 842.216268] env[62923]: DEBUG nova.compute.manager [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 842.216268] env[62923]: DEBUG nova.network.neutron [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 842.276560] env[62923]: DEBUG nova.policy [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ad76ea94b62472fa3318cbbdb308ebe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d1559d2844647aba922cae8e9d992e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 842.539883] env[62923]: INFO nova.compute.manager [-] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Took 1.23 seconds to deallocate network for instance. [ 842.705590] env[62923]: DEBUG nova.network.neutron [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Successfully created port: 96266d56-2661-429a-aa45-a2015a285f2a {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 842.719097] env[62923]: DEBUG nova.compute.manager [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 842.750939] env[62923]: DEBUG nova.network.neutron [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Successfully updated port: 7e696989-aebf-4820-8c47-eadc1b780de7 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 842.980022] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99cbbaba-eaee-4f72-b37e-72a09fecf621 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.987790] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39fea30-96ef-407a-a08d-5dc93f82b42c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.024652] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5d10fa-3285-4051-a44d-3d710cbebe86 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.032333] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45324254-40e7-437e-ac4d-c17bca9be73f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.046402] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.046875] env[62923]: DEBUG nova.compute.provider_tree [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.257731] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Acquiring lock "refresh_cache-6cf594e3-e4a6-45f5-b8d2-06db1c200042" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.257891] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Acquired lock "refresh_cache-6cf594e3-e4a6-45f5-b8d2-06db1c200042" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.258179] env[62923]: DEBUG nova.network.neutron [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.550769] env[62923]: DEBUG nova.scheduler.client.report [None 
req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 843.673572] env[62923]: DEBUG nova.compute.manager [req-f577a888-f9a8-4268-8cdb-b54dc0019055 req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Received event network-vif-plugged-7e696989-aebf-4820-8c47-eadc1b780de7 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 843.673761] env[62923]: DEBUG oslo_concurrency.lockutils [req-f577a888-f9a8-4268-8cdb-b54dc0019055 req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] Acquiring lock "6cf594e3-e4a6-45f5-b8d2-06db1c200042-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.673965] env[62923]: DEBUG oslo_concurrency.lockutils [req-f577a888-f9a8-4268-8cdb-b54dc0019055 req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] Lock "6cf594e3-e4a6-45f5-b8d2-06db1c200042-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.674568] env[62923]: DEBUG oslo_concurrency.lockutils [req-f577a888-f9a8-4268-8cdb-b54dc0019055 req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] Lock "6cf594e3-e4a6-45f5-b8d2-06db1c200042-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.674886] env[62923]: DEBUG nova.compute.manager [req-f577a888-f9a8-4268-8cdb-b54dc0019055 req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] No waiting events found dispatching network-vif-plugged-7e696989-aebf-4820-8c47-eadc1b780de7 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 843.675133] env[62923]: WARNING nova.compute.manager [req-f577a888-f9a8-4268-8cdb-b54dc0019055 req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Received unexpected event network-vif-plugged-7e696989-aebf-4820-8c47-eadc1b780de7 for instance with vm_state building and task_state spawning. 
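[editor's note] The WARNING record above ("No waiting events found dispatching network-vif-plugged-… / Received unexpected event …") reflects a register-then-dispatch pattern: a thread that wants to block on an external Neutron event must register a waiter before kicking off the work, and an incoming event either completes a registered waiter or is reported as unexpected. The sketch below is illustrative only and is not Nova's implementation; the names EventRegistry, expect, and dispatch are invented for this example.

    # Minimal sketch of the waiter-registry pattern suggested by the log,
    # using plain Python threading. Hypothetical API, not Nova's.
    import threading

    class EventRegistry:
        def __init__(self):
            self._lock = threading.Lock()
            # (instance_uuid, event_name) -> threading.Event
            self._waiters = {}

        def expect(self, instance_uuid, event_name):
            """Register a waiter BEFORE starting the operation that will
            eventually produce the event, avoiding a lost-wakeup race."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def dispatch(self, instance_uuid, event_name):
            """Deliver an external event: wake the registered waiter if
            present, otherwise report it as unexpected (the WARNING case
            seen in the log above)."""
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is not None:
                waiter.set()
            else:
                print("WARNING: received unexpected event %s for instance %s"
                      % (event_name, instance_uuid))

    registry = EventRegistry()
    # Expected path: waiter registered first, then the event arrives.
    waiter = registry.expect("6cf594e3", "network-vif-plugged-7e696989")
    registry.dispatch("6cf594e3", "network-vif-plugged-7e696989")
    assert waiter.wait(timeout=1)
    # Unexpected path: no waiter registered, so the event is only logged.
    registry.dispatch("6cf594e3", "network-vif-plugged-deadbeef")

In the log this is benign: the event raced ahead of (or arrived without) a registered waiter while the instance was still in vm_state building / task_state spawning, so the compute manager logs the WARNING and continues. [end editor's note]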
[ 843.675337] env[62923]: DEBUG nova.compute.manager [req-f577a888-f9a8-4268-8cdb-b54dc0019055 req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Received event network-changed-7e696989-aebf-4820-8c47-eadc1b780de7 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 843.675528] env[62923]: DEBUG nova.compute.manager [req-f577a888-f9a8-4268-8cdb-b54dc0019055 req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Refreshing instance network info cache due to event network-changed-7e696989-aebf-4820-8c47-eadc1b780de7. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 843.675790] env[62923]: DEBUG oslo_concurrency.lockutils [req-f577a888-f9a8-4268-8cdb-b54dc0019055 req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] Acquiring lock "refresh_cache-6cf594e3-e4a6-45f5-b8d2-06db1c200042" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.735245] env[62923]: DEBUG nova.compute.manager [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 843.778023] env[62923]: DEBUG nova.virt.hardware [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 843.778023] env[62923]: DEBUG nova.virt.hardware [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 843.778023] env[62923]: DEBUG nova.virt.hardware [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 843.778023] env[62923]: DEBUG nova.virt.hardware [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 843.778023] env[62923]: DEBUG nova.virt.hardware [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Image pref 
0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 843.778023] env[62923]: DEBUG nova.virt.hardware [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 843.778023] env[62923]: DEBUG nova.virt.hardware [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 843.778023] env[62923]: DEBUG nova.virt.hardware [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 843.778023] env[62923]: DEBUG nova.virt.hardware [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 843.778023] env[62923]: DEBUG nova.virt.hardware [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 843.778023] env[62923]: DEBUG nova.virt.hardware [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 843.778023] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fa966d-9a74-4e92-9427-68934c118a9d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.787552] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723a8874-4b5a-4c31-bc4e-57668a96f8de {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.815946] env[62923]: DEBUG nova.network.neutron [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.006264] env[62923]: DEBUG nova.network.neutron [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Updating instance_info_cache with network_info: [{"id": "7e696989-aebf-4820-8c47-eadc1b780de7", "address": "fa:16:3e:e0:11:79", "network": {"id": "97152fbc-6cd6-4e1e-a822-b0e6120fc663", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-417660842-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c1729b948504ceb92f67c7e1a21ca21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e696989-ae", "ovs_interfaceid": "7e696989-aebf-4820-8c47-eadc1b780de7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.057517] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.057517] env[62923]: DEBUG nova.compute.manager [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 844.060929] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.301s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.061420] env[62923]: INFO nova.compute.claims [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 844.396891] env[62923]: DEBUG nova.network.neutron [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Successfully updated port: 96266d56-2661-429a-aa45-a2015a285f2a {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 844.510663] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Releasing lock "refresh_cache-6cf594e3-e4a6-45f5-b8d2-06db1c200042" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.510663] env[62923]: DEBUG nova.compute.manager [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Instance network_info: |[{"id": "7e696989-aebf-4820-8c47-eadc1b780de7", "address": "fa:16:3e:e0:11:79", "network": {"id": "97152fbc-6cd6-4e1e-a822-b0e6120fc663", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-417660842-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c1729b948504ceb92f67c7e1a21ca21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e696989-ae", "ovs_interfaceid": "7e696989-aebf-4820-8c47-eadc1b780de7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 844.510663] env[62923]: DEBUG oslo_concurrency.lockutils [req-f577a888-f9a8-4268-8cdb-b54dc0019055 req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] Acquired lock "refresh_cache-6cf594e3-e4a6-45f5-b8d2-06db1c200042" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.510663] env[62923]: DEBUG nova.network.neutron [req-f577a888-f9a8-4268-8cdb-b54dc0019055 
req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Refreshing network info cache for port 7e696989-aebf-4820-8c47-eadc1b780de7 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 844.511408] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:11:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e696989-aebf-4820-8c47-eadc1b780de7', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.520115] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Creating folder: Project (9c1729b948504ceb92f67c7e1a21ca21). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 844.527055] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa8fe22c-adf9-4178-907e-4ebfd814812d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.542186] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Created folder: Project (9c1729b948504ceb92f67c7e1a21ca21) in parent group-v291405. [ 844.542186] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Creating folder: Instances. Parent ref: group-v291434. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 844.542186] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74414bde-7a2a-4646-8bd0-220a5fe33071 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.553887] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Created folder: Instances in parent group-v291434. [ 844.554224] env[62923]: DEBUG oslo.service.loopingcall [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.554465] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.554709] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-879db287-c446-47a5-9b2e-5c3be2707047 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.573436] env[62923]: DEBUG nova.compute.utils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 844.577348] env[62923]: DEBUG nova.compute.manager [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 844.577348] env[62923]: DEBUG nova.network.neutron [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 844.583975] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.583975] env[62923]: value = "task-1369935" [ 844.583975] env[62923]: _type = "Task" [ 844.583975] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.593015] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369935, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.641626] env[62923]: DEBUG nova.policy [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e1b761abfd44661a6da62ba35ec442f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2948b6c7e6f04cf98b36777c2fc94fc1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 844.899988] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "refresh_cache-a616c7f0-8c39-4c08-a1a4-1d89e158d3c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.900349] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "refresh_cache-a616c7f0-8c39-4c08-a1a4-1d89e158d3c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.900473] env[62923]: DEBUG nova.network.neutron [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 844.973922] env[62923]: DEBUG nova.network.neutron [req-f577a888-f9a8-4268-8cdb-b54dc0019055 req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Updated VIF entry in instance network info cache for port 7e696989-aebf-4820-8c47-eadc1b780de7. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 844.974309] env[62923]: DEBUG nova.network.neutron [req-f577a888-f9a8-4268-8cdb-b54dc0019055 req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Updating instance_info_cache with network_info: [{"id": "7e696989-aebf-4820-8c47-eadc1b780de7", "address": "fa:16:3e:e0:11:79", "network": {"id": "97152fbc-6cd6-4e1e-a822-b0e6120fc663", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-417660842-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c1729b948504ceb92f67c7e1a21ca21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e696989-ae", "ovs_interfaceid": "7e696989-aebf-4820-8c47-eadc1b780de7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.077612] env[62923]: DEBUG nova.compute.manager [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 845.100882] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369935, 'name': CreateVM_Task, 'duration_secs': 0.375252} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.101071] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.101787] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.102601] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.102601] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 845.102692] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dff4db04-6bcf-4be6-9f70-35a9f4d9c1f1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.109757] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Waiting for the task: (returnval){ [ 845.109757] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5248e36d-f257-5748-7a78-73367a102748" [ 845.109757] env[62923]: _type = "Task" [ 845.109757] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.120568] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5248e36d-f257-5748-7a78-73367a102748, 'name': SearchDatastore_Task, 'duration_secs': 0.008837} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.124204] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.124492] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.124762] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.125012] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.125309] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.132420] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8587e960-302d-4440-8768-7c21caf6f9f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.138656] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.138656] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 845.140524] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab119a82-ed2d-401a-9340-09cbf688fa61 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.150790] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Waiting for the task: (returnval){ [ 845.150790] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52aefa51-a98c-5c9c-1814-cab3525a683f" [ 845.150790] env[62923]: _type = "Task" [ 845.150790] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.160195] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52aefa51-a98c-5c9c-1814-cab3525a683f, 'name': SearchDatastore_Task, 'duration_secs': 0.009185} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.164188] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00ff02b5-cd01-4234-a853-416ad25329f4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.170561] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Waiting for the task: (returnval){ [ 845.170561] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525aa8c2-8918-5249-cd07-de4d42de0c9f" [ 845.170561] env[62923]: _type = "Task" [ 845.170561] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.185114] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525aa8c2-8918-5249-cd07-de4d42de0c9f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.248163] env[62923]: DEBUG nova.network.neutron [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Successfully created port: 27bb1fc5-45d6-4beb-a8b8-7c3817de011c {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 845.366094] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5983afa6-19e1-41bc-a872-fb4ab4fc6274 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.374327] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e4f0fb-611a-4d42-b9e0-b121f08f8686 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.410934] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da016579-fdaa-48ec-8edd-4cce12583d88 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.422055] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8b7270-83c8-44d9-813a-ef960dabbc0a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.437632] env[62923]: DEBUG nova.compute.provider_tree [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.477927] env[62923]: DEBUG oslo_concurrency.lockutils [req-f577a888-f9a8-4268-8cdb-b54dc0019055 req-b86daea8-53bb-49c1-bac4-c5837057f685 service nova] Releasing lock "refresh_cache-6cf594e3-e4a6-45f5-b8d2-06db1c200042" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.483683] env[62923]: DEBUG nova.network.neutron [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 845.680864] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525aa8c2-8918-5249-cd07-de4d42de0c9f, 'name': SearchDatastore_Task, 'duration_secs': 0.008473} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.681088] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.681344] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 6cf594e3-e4a6-45f5-b8d2-06db1c200042/6cf594e3-e4a6-45f5-b8d2-06db1c200042.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 845.681592] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b425820-9b2f-4743-b141-2006c39978c6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.691578] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Waiting for the task: (returnval){ [ 845.691578] env[62923]: value = "task-1369936" [ 845.691578] env[62923]: _type = "Task" [ 845.691578] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.699983] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369936, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.881892] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "534fa654-ed73-4518-bdc7-d1f981628fd8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.882143] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "534fa654-ed73-4518-bdc7-d1f981628fd8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.942695] env[62923]: DEBUG nova.scheduler.client.report [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 845.968456] env[62923]: DEBUG nova.compute.manager [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Received event network-vif-plugged-96266d56-2661-429a-aa45-a2015a285f2a {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 845.968779] env[62923]: DEBUG oslo_concurrency.lockutils [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] Acquiring lock "a616c7f0-8c39-4c08-a1a4-1d89e158d3c5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.969388] env[62923]: DEBUG oslo_concurrency.lockutils [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] Lock "a616c7f0-8c39-4c08-a1a4-1d89e158d3c5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.969484] env[62923]: DEBUG oslo_concurrency.lockutils [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] Lock "a616c7f0-8c39-4c08-a1a4-1d89e158d3c5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.969750] env[62923]: DEBUG nova.compute.manager [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] [instance: 
a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] No waiting events found dispatching network-vif-plugged-96266d56-2661-429a-aa45-a2015a285f2a {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 845.969985] env[62923]: WARNING nova.compute.manager [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Received unexpected event network-vif-plugged-96266d56-2661-429a-aa45-a2015a285f2a for instance with vm_state building and task_state spawning. [ 845.970258] env[62923]: DEBUG nova.compute.manager [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Received event network-changed-96266d56-2661-429a-aa45-a2015a285f2a {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 845.970514] env[62923]: DEBUG nova.compute.manager [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Refreshing instance network info cache due to event network-changed-96266d56-2661-429a-aa45-a2015a285f2a. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 845.970747] env[62923]: DEBUG oslo_concurrency.lockutils [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] Acquiring lock "refresh_cache-a616c7f0-8c39-4c08-a1a4-1d89e158d3c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.982589] env[62923]: DEBUG nova.network.neutron [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Updating instance_info_cache with network_info: [{"id": "96266d56-2661-429a-aa45-a2015a285f2a", "address": "fa:16:3e:db:b6:81", "network": {"id": "2beb4718-469b-47f0-94d1-7bc1c52e79db", "bridge": "br-int", "label": "tempest-ImagesTestJSON-615155455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1559d2844647aba922cae8e9d992e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96266d56-26", "ovs_interfaceid": "96266d56-2661-429a-aa45-a2015a285f2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.089548] env[62923]: DEBUG nova.compute.manager [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 846.120726] env[62923]: DEBUG nova.virt.hardware [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 846.120938] env[62923]: DEBUG nova.virt.hardware [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 846.121127] env[62923]: DEBUG nova.virt.hardware [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.121291] env[62923]: DEBUG nova.virt.hardware [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 846.121432] env[62923]: DEBUG nova.virt.hardware [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.121573] env[62923]: DEBUG nova.virt.hardware [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 846.121775] env[62923]: DEBUG nova.virt.hardware [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 846.121930] env[62923]: DEBUG nova.virt.hardware [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 846.122103] env[62923]: DEBUG nova.virt.hardware [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Got 1 possible 
topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 846.122265] env[62923]: DEBUG nova.virt.hardware [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 846.122419] env[62923]: DEBUG nova.virt.hardware [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 846.123395] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47e60cf-ffd1-4462-8c3c-bd4d3112448d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.135227] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afed4d25-da0d-4f9a-aeb3-54ade2878527 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.201204] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369936, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.455217] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.396s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.455734] env[62923]: DEBUG nova.compute.manager [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 846.458529] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.066s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.460185] env[62923]: INFO nova.compute.claims [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 846.485164] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "refresh_cache-a616c7f0-8c39-4c08-a1a4-1d89e158d3c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.485751] env[62923]: DEBUG nova.compute.manager [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Instance network_info: |[{"id": "96266d56-2661-429a-aa45-a2015a285f2a", "address": "fa:16:3e:db:b6:81", "network": {"id": "2beb4718-469b-47f0-94d1-7bc1c52e79db", "bridge": "br-int", "label": "tempest-ImagesTestJSON-615155455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1559d2844647aba922cae8e9d992e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96266d56-26", "ovs_interfaceid": "96266d56-2661-429a-aa45-a2015a285f2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 846.485904] env[62923]: DEBUG oslo_concurrency.lockutils [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] Acquired lock "refresh_cache-a616c7f0-8c39-4c08-a1a4-1d89e158d3c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.486178] env[62923]: DEBUG nova.network.neutron [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Refreshing network info cache for port 96266d56-2661-429a-aa45-a2015a285f2a {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 846.487267] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] 
[instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:b6:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6eaa481-1f92-4851-b98e-09ed0daad7cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96266d56-2661-429a-aa45-a2015a285f2a', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 846.494867] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Creating folder: Project (2d1559d2844647aba922cae8e9d992e6). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 846.495583] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4008915c-d74d-418b-9e27-e9b8f9e01c60 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.507196] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Created folder: Project (2d1559d2844647aba922cae8e9d992e6) in parent group-v291405. [ 846.507345] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Creating folder: Instances. Parent ref: group-v291437. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 846.507785] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca889c4f-8f68-4aaa-8781-0bdb49e8bd5b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.516366] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Created folder: Instances in parent group-v291437. [ 846.516583] env[62923]: DEBUG oslo.service.loopingcall [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 846.516758] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 846.516958] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76505e46-aab4-4535-ac7b-deaf8934de36 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.535682] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 846.535682] env[62923]: value = "task-1369939" [ 846.535682] env[62923]: _type = "Task" [ 846.535682] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.544493] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369939, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.701528] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369936, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.964453] env[62923]: DEBUG nova.compute.utils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 846.967874] env[62923]: DEBUG nova.compute.manager [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 846.967874] env[62923]: DEBUG nova.network.neutron [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 847.045097] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369939, 'name': CreateVM_Task} progress is 25%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.046547] env[62923]: DEBUG nova.policy [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c654b8365f5543f3bf713f3f5aa00654', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a654d46357ed49cd95460a56926f102a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 847.201937] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369936, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.468638] env[62923]: DEBUG nova.compute.manager [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 847.547954] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369939, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.556580] env[62923]: DEBUG nova.network.neutron [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Updated VIF entry in instance network info cache for port 96266d56-2661-429a-aa45-a2015a285f2a. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 847.556931] env[62923]: DEBUG nova.network.neutron [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Updating instance_info_cache with network_info: [{"id": "96266d56-2661-429a-aa45-a2015a285f2a", "address": "fa:16:3e:db:b6:81", "network": {"id": "2beb4718-469b-47f0-94d1-7bc1c52e79db", "bridge": "br-int", "label": "tempest-ImagesTestJSON-615155455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1559d2844647aba922cae8e9d992e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96266d56-26", "ovs_interfaceid": "96266d56-2661-429a-aa45-a2015a285f2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.705123] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369936, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.602498} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.706601] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 6cf594e3-e4a6-45f5-b8d2-06db1c200042/6cf594e3-e4a6-45f5-b8d2-06db1c200042.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 847.706601] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.706601] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47f964a8-62a5-4dab-821a-88586346cc17 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.714236] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Waiting for the task: (returnval){ [ 847.714236] env[62923]: value = "task-1369940" [ 847.714236] env[62923]: _type = "Task" [ 847.714236] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.722957] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369940, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.732414] env[62923]: DEBUG nova.network.neutron [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Successfully created port: 9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 847.735639] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7a5d8b-92ac-4bd4-b6ea-f50edf387650 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.744739] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e41b1a-782d-4dda-b88a-a3ab42fa7f1f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.748861] env[62923]: DEBUG nova.network.neutron [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Successfully updated port: 27bb1fc5-45d6-4beb-a8b8-7c3817de011c {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 847.780913] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bacda7-627c-491b-9fd4-d158a4b68464 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.787909] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23aa6a7-38d1-4d89-966d-9cbb579796c3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.801188] env[62923]: DEBUG nova.compute.provider_tree [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.029238] env[62923]: DEBUG nova.compute.manager [req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Received event network-vif-plugged-27bb1fc5-45d6-4beb-a8b8-7c3817de011c {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 848.029238] env[62923]: DEBUG oslo_concurrency.lockutils [req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] Acquiring lock "7c3edceb-cc58-4925-a97a-3204936c836d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.029238] env[62923]: DEBUG oslo_concurrency.lockutils [req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] Lock "7c3edceb-cc58-4925-a97a-3204936c836d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.029574] env[62923]: DEBUG oslo_concurrency.lockutils 
[req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] Lock "7c3edceb-cc58-4925-a97a-3204936c836d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.029574] env[62923]: DEBUG nova.compute.manager [req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] No waiting events found dispatching network-vif-plugged-27bb1fc5-45d6-4beb-a8b8-7c3817de011c {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 848.029692] env[62923]: WARNING nova.compute.manager [req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Received unexpected event network-vif-plugged-27bb1fc5-45d6-4beb-a8b8-7c3817de011c for instance with vm_state building and task_state spawning. [ 848.029846] env[62923]: DEBUG nova.compute.manager [req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Received event network-changed-27bb1fc5-45d6-4beb-a8b8-7c3817de011c {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 848.029996] env[62923]: DEBUG nova.compute.manager [req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Refreshing instance network info cache due to event network-changed-27bb1fc5-45d6-4beb-a8b8-7c3817de011c. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 848.031211] env[62923]: DEBUG oslo_concurrency.lockutils [req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] Acquiring lock "refresh_cache-7c3edceb-cc58-4925-a97a-3204936c836d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.031458] env[62923]: DEBUG oslo_concurrency.lockutils [req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] Acquired lock "refresh_cache-7c3edceb-cc58-4925-a97a-3204936c836d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.031682] env[62923]: DEBUG nova.network.neutron [req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Refreshing network info cache for port 27bb1fc5-45d6-4beb-a8b8-7c3817de011c {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 848.050690] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369939, 'name': CreateVM_Task, 'duration_secs': 1.127422} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.050690] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 848.051381] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.051546] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.051924] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 848.052332] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01eced5b-eba4-4a06-87e0-2864d8592d01 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.057364] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 848.057364] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52025d16-23d7-225e-79d8-945ae2e5e603" [ 848.057364] env[62923]: _type = "Task" [ 848.057364] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.061339] env[62923]: DEBUG oslo_concurrency.lockutils [req-aa4215fd-a65b-47e6-b1fc-7e23f029c04b req-1d5e7ecd-b7c9-478e-a2c0-b33225698cc0 service nova] Releasing lock "refresh_cache-a616c7f0-8c39-4c08-a1a4-1d89e158d3c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.066142] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52025d16-23d7-225e-79d8-945ae2e5e603, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.223930] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369940, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05378} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.224437] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 848.225340] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7646f12e-a96b-45c8-bd23-5e59d38241dc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.246816] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 6cf594e3-e4a6-45f5-b8d2-06db1c200042/6cf594e3-e4a6-45f5-b8d2-06db1c200042.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 848.247092] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b93a36ca-76ef-43be-9b58-207b9e629918 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.266468] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Waiting for the task: (returnval){ [ 848.266468] env[62923]: value = "task-1369941" [ 848.266468] env[62923]: _type = "Task" [ 848.266468] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.274032] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369941, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.279611] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "refresh_cache-7c3edceb-cc58-4925-a97a-3204936c836d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.304476] env[62923]: DEBUG nova.scheduler.client.report [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 848.480932] env[62923]: DEBUG nova.compute.manager [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 848.505538] env[62923]: DEBUG nova.virt.hardware [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 848.505770] env[62923]: DEBUG nova.virt.hardware [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 848.505948] env[62923]: DEBUG nova.virt.hardware [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 848.506157] env[62923]: DEBUG nova.virt.hardware [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 848.506300] env[62923]: DEBUG nova.virt.hardware [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 848.506438] env[62923]: DEBUG nova.virt.hardware [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 848.506639] env[62923]: DEBUG nova.virt.hardware [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 848.506791] env[62923]: DEBUG nova.virt.hardware [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 848.506947] env[62923]: DEBUG nova.virt.hardware [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 848.507115] env[62923]: DEBUG nova.virt.hardware [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 848.507306] env[62923]: DEBUG nova.virt.hardware [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 848.508212] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63441770-0364-4904-8ca1-ccc0945da4d6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.517112] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9e0d82-c63c-4ea7-9394-110ae4dfa961 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.566599] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52025d16-23d7-225e-79d8-945ae2e5e603, 'name': SearchDatastore_Task, 'duration_secs': 0.009401} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.566879] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.567121] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.567386] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.567531] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.567705] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 848.567953] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ddffc4b6-904e-49fe-bbc9-8164bcd86217 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.575517] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 848.575675] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 848.576297] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53ce69b3-b99a-4ce0-af1f-727e113dfdd2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.581400] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 848.581400] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f38c2a-c3bb-78f6-2fb9-2a9e8493f439" [ 848.581400] env[62923]: _type = "Task" [ 848.581400] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.582154] env[62923]: DEBUG nova.network.neutron [req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 848.591292] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f38c2a-c3bb-78f6-2fb9-2a9e8493f439, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.723223] env[62923]: DEBUG nova.network.neutron [req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.776715] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369941, 'name': ReconfigVM_Task, 'duration_secs': 0.247299} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.776992] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 6cf594e3-e4a6-45f5-b8d2-06db1c200042/6cf594e3-e4a6-45f5-b8d2-06db1c200042.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.777620] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-526110fa-3d62-453e-bbd7-b9395211fd0f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.784013] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Waiting for the task: (returnval){ [ 848.784013] env[62923]: value = "task-1369942" [ 848.784013] env[62923]: _type = "Task" [ 848.784013] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.791997] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369942, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.809359] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.351s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.809847] env[62923]: DEBUG nova.compute.manager [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 848.812782] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.887s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.813987] env[62923]: DEBUG nova.objects.instance [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lazy-loading 'resources' on Instance uuid 98974fb7-049a-4c72-a352-bc0a50d2a879 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.093618] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f38c2a-c3bb-78f6-2fb9-2a9e8493f439, 'name': SearchDatastore_Task, 'duration_secs': 0.008935} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.094408] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-782e1f30-e30c-46f4-ada8-336dbecd7681 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.099516] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 849.099516] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f886ec-5ef4-72e6-659c-691d4739d424" [ 849.099516] env[62923]: _type = "Task" [ 849.099516] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.107232] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f886ec-5ef4-72e6-659c-691d4739d424, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.226991] env[62923]: DEBUG oslo_concurrency.lockutils [req-a8d7b7a5-2e3a-42dd-88de-25f69959c16e req-6e6f9b73-b07b-4605-949b-6684bc5ba892 service nova] Releasing lock "refresh_cache-7c3edceb-cc58-4925-a97a-3204936c836d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.226991] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "refresh_cache-7c3edceb-cc58-4925-a97a-3204936c836d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.226991] env[62923]: DEBUG nova.network.neutron [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 849.294320] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369942, 'name': Rename_Task, 'duration_secs': 0.134609} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.294429] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.294818] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85e51d43-f4ad-40d2-87e2-69738036a5f7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.305562] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Waiting for the task: (returnval){ [ 849.305562] env[62923]: value = "task-1369943" [ 849.305562] env[62923]: _type = "Task" [ 849.305562] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.311595] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369943, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.315259] env[62923]: DEBUG nova.compute.utils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 849.319673] env[62923]: DEBUG nova.compute.manager [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Not allocating networking since 'none' was specified. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 849.438087] env[62923]: DEBUG nova.network.neutron [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Successfully updated port: 9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 849.548442] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1968b8f6-e981-4a5e-b8ed-848cc1746687 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.556340] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bffd94d-6325-4508-9ca5-f1a87c640e32 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.589256] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5af1b5-e960-45a9-9d5a-f3167928bdac {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.596652] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a2ed5b-f18e-48c9-a28a-4143b3fe3acb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.611961] env[62923]: DEBUG nova.compute.provider_tree [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.616553] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f886ec-5ef4-72e6-659c-691d4739d424, 'name': SearchDatastore_Task, 'duration_secs': 0.008782} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.616856] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.617115] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] a616c7f0-8c39-4c08-a1a4-1d89e158d3c5/a616c7f0-8c39-4c08-a1a4-1d89e158d3c5.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 849.617349] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4969fca6-f55f-41c1-8891-4b53bc66793b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.623612] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 849.623612] env[62923]: value = "task-1369944" [ 849.623612] env[62923]: _type = "Task" [ 849.623612] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.632204] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369944, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.786365] env[62923]: DEBUG nova.network.neutron [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 849.824521] env[62923]: DEBUG nova.compute.manager [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 849.832030] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369943, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.940460] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "refresh_cache-880cce70-5a0c-40a6-91b5-73d074feab6f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.940610] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "refresh_cache-880cce70-5a0c-40a6-91b5-73d074feab6f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.940765] env[62923]: DEBUG nova.network.neutron [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 850.042168] env[62923]: DEBUG nova.network.neutron [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Updating instance_info_cache with network_info: [{"id": "27bb1fc5-45d6-4beb-a8b8-7c3817de011c", "address": "fa:16:3e:1a:c3:fd", "network": {"id": "9ed96510-533e-4ed6-bf9b-e1a401a9df79", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060581969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2948b6c7e6f04cf98b36777c2fc94fc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27bb1fc5-45", "ovs_interfaceid": "27bb1fc5-45d6-4beb-a8b8-7c3817de011c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.057822] env[62923]: DEBUG nova.compute.manager [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Received event network-vif-plugged-9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 850.058059] env[62923]: DEBUG oslo_concurrency.lockutils [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] Acquiring lock "880cce70-5a0c-40a6-91b5-73d074feab6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.058261] env[62923]: 
DEBUG oslo_concurrency.lockutils [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] Lock "880cce70-5a0c-40a6-91b5-73d074feab6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.058475] env[62923]: DEBUG oslo_concurrency.lockutils [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] Lock "880cce70-5a0c-40a6-91b5-73d074feab6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.058575] env[62923]: DEBUG nova.compute.manager [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] No waiting events found dispatching network-vif-plugged-9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 850.058759] env[62923]: WARNING nova.compute.manager [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Received unexpected event network-vif-plugged-9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3 for instance with vm_state building and task_state spawning. [ 850.058878] env[62923]: DEBUG nova.compute.manager [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Received event network-changed-9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 850.059197] env[62923]: DEBUG nova.compute.manager [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Refreshing instance network info cache due to event network-changed-9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 850.059367] env[62923]: DEBUG oslo_concurrency.lockutils [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] Acquiring lock "refresh_cache-880cce70-5a0c-40a6-91b5-73d074feab6f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.117938] env[62923]: DEBUG nova.scheduler.client.report [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 850.133979] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369944, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4501} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.134832] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] a616c7f0-8c39-4c08-a1a4-1d89e158d3c5/a616c7f0-8c39-4c08-a1a4-1d89e158d3c5.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 850.135094] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 850.135357] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3bdbadd-4519-471f-b7bd-d304e7092d34 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.141953] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 850.141953] env[62923]: value = "task-1369945" [ 850.141953] env[62923]: _type = "Task" [ 850.141953] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.149879] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369945, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.313407] env[62923]: DEBUG oslo_vmware.api [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369943, 'name': PowerOnVM_Task, 'duration_secs': 0.849284} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.314732] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 850.314732] env[62923]: INFO nova.compute.manager [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Took 8.91 seconds to spawn the instance on the hypervisor. [ 850.314732] env[62923]: DEBUG nova.compute.manager [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 850.315052] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f563dcc2-a56c-4043-a6e8-07b95f333be2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.468290] env[62923]: DEBUG nova.network.neutron [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.544946] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "refresh_cache-7c3edceb-cc58-4925-a97a-3204936c836d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.545284] env[62923]: DEBUG nova.compute.manager [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Instance network_info: |[{"id": "27bb1fc5-45d6-4beb-a8b8-7c3817de011c", "address": "fa:16:3e:1a:c3:fd", "network": {"id": "9ed96510-533e-4ed6-bf9b-e1a401a9df79", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060581969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2948b6c7e6f04cf98b36777c2fc94fc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27bb1fc5-45", "ovs_interfaceid": "27bb1fc5-45d6-4beb-a8b8-7c3817de011c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 850.545681] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:c3:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ba07329-1d3e-4ba8-8774-d029262318c4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27bb1fc5-45d6-4beb-a8b8-7c3817de011c', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 850.553009] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Creating folder: Project (2948b6c7e6f04cf98b36777c2fc94fc1). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 850.553276] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27b9b64c-fe3f-4946-aadb-87414fa16cf9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.563134] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Created folder: Project (2948b6c7e6f04cf98b36777c2fc94fc1) in parent group-v291405. 
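The two records above show the vmwareapi driver flattening the Neutron network_info entry for port 27bb1fc5-45d6-4beb-a8b8-7c3817de011c into the flat "Instance VIF info" dict it logs before building the VM: the network's bridge becomes network_name, the port MAC becomes mac_address, and the nsx-logical-switch-id from the port's binding details becomes an OpaqueNetwork reference. A minimal sketch of that mapping, built only from fields visible in the log; the function name and the vif_model default are hypothetical, and this is not nova's actual nova/virt/vmwareapi code:

def vif_info_from_network_info(vif, vif_model="vmxnet3"):
    # 'vif' is one entry of the network_info list exactly as logged above.
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],        # "br-int"
        "mac_address": vif["address"],                   # "fa:16:3e:1a:c3:fd"
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                           # Neutron port UUID
        "vif_model": vif_model,
    }

Applied to the network_info entry logged at 850.545284, this would reproduce the VIF info dict logged at 850.545681; the real driver presumably derives vif_model from the image metadata (the image name here ends in ";vmxnet3") rather than taking it as a default parameter.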
[ 850.563311] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Creating folder: Instances. Parent ref: group-v291440. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 850.563529] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e877274a-fc34-4a1c-a8e0-b284c1530d39 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.571719] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Created folder: Instances in parent group-v291440. [ 850.571935] env[62923]: DEBUG oslo.service.loopingcall [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.573917] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 850.574136] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7347c6b-a92a-49a2-ad22-0a30e212637d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.592334] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 850.592334] env[62923]: value = "task-1369948" [ 850.592334] env[62923]: _type = "Task" [ 850.592334] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.599298] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369948, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.623282] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.810s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.625523] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.428s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.627118] env[62923]: INFO nova.compute.claims [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 850.630446] env[62923]: DEBUG nova.network.neutron [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Updating instance_info_cache with network_info: [{"id": "9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3", "address": "fa:16:3e:f0:eb:fd", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fbeeed0-1b", "ovs_interfaceid": "9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.648814] env[62923]: INFO nova.scheduler.client.report [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Deleted allocations for instance 98974fb7-049a-4c72-a352-bc0a50d2a879 [ 850.653019] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369945, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073384} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.655424] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 850.656400] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86b7b40-21c7-42b5-8266-d67e1a3e7634 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.680640] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] a616c7f0-8c39-4c08-a1a4-1d89e158d3c5/a616c7f0-8c39-4c08-a1a4-1d89e158d3c5.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 850.680956] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fd2589f-1a7d-4a3c-a57e-fe90f759d788 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.700621] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 850.700621] env[62923]: value = "task-1369949" [ 850.700621] env[62923]: _type = "Task" [ 850.700621] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.710442] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369949, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.833996] env[62923]: INFO nova.compute.manager [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Took 39.45 seconds to build instance. [ 850.836195] env[62923]: DEBUG nova.compute.manager [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 850.861914] env[62923]: DEBUG nova.virt.hardware [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 850.862249] env[62923]: DEBUG nova.virt.hardware [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 850.862441] env[62923]: DEBUG nova.virt.hardware [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 850.862634] env[62923]: DEBUG nova.virt.hardware [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 850.862780] env[62923]: DEBUG nova.virt.hardware [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 850.862951] env[62923]: DEBUG nova.virt.hardware [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 850.863182] env[62923]: DEBUG nova.virt.hardware [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 850.863341] env[62923]: DEBUG nova.virt.hardware [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 850.863505] env[62923]: DEBUG nova.virt.hardware [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 
tempest-ServerShowV257Test-253681236-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 850.863670] env[62923]: DEBUG nova.virt.hardware [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 850.863822] env[62923]: DEBUG nova.virt.hardware [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 850.864700] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b55766-ccf2-42af-9e67-b595d9e20406 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.873378] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034163e6-fcd3-483f-8667-8d5750fdef90 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.887761] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Instance VIF info [] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 850.893129] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Creating folder: Project (88a5c6d4260c473b81b8d0bdb699b228). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 850.893439] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8e9a66f-71de-429f-a2c8-b23bf0f48314 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.903224] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Created folder: Project (88a5c6d4260c473b81b8d0bdb699b228) in parent group-v291405. [ 850.903433] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Creating folder: Instances. Parent ref: group-v291443. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 850.903662] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d38749c-2ff6-4304-ae6c-154da9f427c7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.911954] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Created folder: Instances in parent group-v291443. 
[ 850.912209] env[62923]: DEBUG oslo.service.loopingcall [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.912396] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 850.912590] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4ea1c11-ca32-401b-bbee-b6264f8c0ec4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.929396] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 850.929396] env[62923]: value = "task-1369952" [ 850.929396] env[62923]: _type = "Task" [ 850.929396] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.937036] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369952, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.102686] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369948, 'name': CreateVM_Task, 'duration_secs': 0.349734} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.102836] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 851.103550] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.103717] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.104075] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 851.104358] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80be9da2-5b90-4363-8c2c-5085ef487d14 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.109578] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 851.109578] env[62923]: value = 
"session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527acd57-e82b-9779-2a6f-209eb1545546" [ 851.109578] env[62923]: _type = "Task" [ 851.109578] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.118469] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527acd57-e82b-9779-2a6f-209eb1545546, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.133267] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "refresh_cache-880cce70-5a0c-40a6-91b5-73d074feab6f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.133676] env[62923]: DEBUG nova.compute.manager [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Instance network_info: |[{"id": "9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3", "address": "fa:16:3e:f0:eb:fd", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fbeeed0-1b", "ovs_interfaceid": "9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 851.136169] env[62923]: DEBUG oslo_concurrency.lockutils [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] Acquired lock "refresh_cache-880cce70-5a0c-40a6-91b5-73d074feab6f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.136356] env[62923]: DEBUG nova.network.neutron [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Refreshing network info cache for port 9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 851.137407] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Instance VIF 
info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:eb:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '365ac5b1-6d83-4dfe-887f-60574d7f6124', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 851.146140] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Creating folder: Project (a654d46357ed49cd95460a56926f102a). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 851.149752] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc7264b0-3586-4256-8e75-73e99e75466d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.162429] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Created folder: Project (a654d46357ed49cd95460a56926f102a) in parent group-v291405. [ 851.162609] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Creating folder: Instances. Parent ref: group-v291446. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 851.163076] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef05bf68-2afb-4970-a02c-b3a2dff9c396 tempest-ServersTestMultiNic-5523554 tempest-ServersTestMultiNic-5523554-project-member] Lock "98974fb7-049a-4c72-a352-bc0a50d2a879" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.433s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.163849] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccddbfe0-4291-473e-bd40-103b8c2b3445 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.173139] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Created folder: Instances in parent group-v291446. [ 851.173374] env[62923]: DEBUG oslo.service.loopingcall [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.174041] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 851.174254] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a8cef66-9114-40a4-b620-a9c559f69940 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.195179] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 851.195179] env[62923]: value = "task-1369955" [ 851.195179] env[62923]: _type = "Task" [ 851.195179] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.203257] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369955, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.213253] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369949, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.338450] env[62923]: DEBUG oslo_concurrency.lockutils [None req-244d6e33-2b0e-49f2-9cc7-f0e316621c62 tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Lock "6cf594e3-e4a6-45f5-b8d2-06db1c200042" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.684s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.439891] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369952, 'name': CreateVM_Task, 'duration_secs': 0.456279} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.440158] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 851.440752] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.440752] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.441093] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 851.441396] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c9c1f4d-6d60-4c2e-b971-f1a9ca3cb348 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.445942] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 851.445942] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5275792b-b0db-aeb5-1aaa-5741fb86123b" [ 851.445942] env[62923]: _type = "Task" [ 851.445942] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.453558] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5275792b-b0db-aeb5-1aaa-5741fb86123b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.555008] env[62923]: DEBUG nova.network.neutron [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Updated VIF entry in instance network info cache for port 9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 851.555401] env[62923]: DEBUG nova.network.neutron [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Updating instance_info_cache with network_info: [{"id": "9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3", "address": "fa:16:3e:f0:eb:fd", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fbeeed0-1b", "ovs_interfaceid": "9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.619908] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527acd57-e82b-9779-2a6f-209eb1545546, 'name': SearchDatastore_Task, 'duration_secs': 0.016574} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.620223] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.620441] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 851.620658] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.620795] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.620960] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 851.621224] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebf0f02a-9583-4aff-8262-24f44efd5755 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.629400] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 851.629808] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 851.630321] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec0d9c25-607d-49ce-b8a9-74c951fa4869 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.635878] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 851.635878] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52869115-f160-3c92-07cc-0ba826ae9136" [ 851.635878] env[62923]: _type = "Task" [ 851.635878] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.643463] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52869115-f160-3c92-07cc-0ba826ae9136, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.706673] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369955, 'name': CreateVM_Task} progress is 25%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.715675] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369949, 'name': ReconfigVM_Task, 'duration_secs': 0.737819} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.715973] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Reconfigured VM instance instance-00000043 to attach disk [datastore1] a616c7f0-8c39-4c08-a1a4-1d89e158d3c5/a616c7f0-8c39-4c08-a1a4-1d89e158d3c5.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 851.717718] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a4e1433-a8af-4801-9594-30e8908ec2bc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.728568] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 851.728568] env[62923]: value = "task-1369956" [ 851.728568] env[62923]: _type = "Task" [ 851.728568] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.741151] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369956, 'name': Rename_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.841311] env[62923]: DEBUG nova.compute.manager [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 851.912461] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98adc9c-625b-43b6-8843-56fa2fb7713d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.921457] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198c69b8-ba4c-416d-b5aa-b7f3d9b166cd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.958918] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b01c1e-0f55-47c7-873c-a0837c879915 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.966997] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5275792b-b0db-aeb5-1aaa-5741fb86123b, 'name': SearchDatastore_Task, 'duration_secs': 0.041394} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.969008] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.969269] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 851.969502] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.969642] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.969813] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 
tempest-ServerShowV257Test-253681236-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 851.970116] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a4babbf-e014-4716-a443-f777bdbc8f12 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.972626] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805d9f3d-61fe-4db5-9bad-ee8b04e51f37 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.986725] env[62923]: DEBUG nova.compute.provider_tree [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.989898] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 851.989898] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 851.990115] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88cf4d96-57ac-4560-99dc-9bdeb849e4c0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.996031] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 851.996031] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521a7657-486a-9fcf-f68f-8c78e6d771d0" [ 851.996031] env[62923]: _type = "Task" [ 851.996031] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.002866] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521a7657-486a-9fcf-f68f-8c78e6d771d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.057654] env[62923]: DEBUG oslo_concurrency.lockutils [req-d16f010c-175c-4c5f-a481-fa380d30d475 req-eb2b650b-0181-440d-94de-6c5473b353f3 service nova] Releasing lock "refresh_cache-880cce70-5a0c-40a6-91b5-73d074feab6f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.147075] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52869115-f160-3c92-07cc-0ba826ae9136, 'name': SearchDatastore_Task, 'duration_secs': 0.009368} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.147943] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09f28ab5-b359-4cda-b652-560fd575d42e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.154642] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 852.154642] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52abfb0b-857e-b5e6-b68a-a2630f05b2e3" [ 852.154642] env[62923]: _type = "Task" [ 852.154642] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.162929] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52abfb0b-857e-b5e6-b68a-a2630f05b2e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.205261] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369955, 'name': CreateVM_Task, 'duration_secs': 0.635535} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.205450] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 852.206245] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.206403] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.206714] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 852.206970] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b3d18e8-3b2b-48fc-a2f9-62aecd46046d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.211409] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 852.211409] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5265a8bf-25cf-3331-1a4e-3f570c877bcc" [ 852.211409] env[62923]: _type = "Task" [ 852.211409] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.218852] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5265a8bf-25cf-3331-1a4e-3f570c877bcc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.222270] env[62923]: DEBUG oslo_concurrency.lockutils [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Acquiring lock "6cf594e3-e4a6-45f5-b8d2-06db1c200042" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.222478] env[62923]: DEBUG oslo_concurrency.lockutils [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Lock "6cf594e3-e4a6-45f5-b8d2-06db1c200042" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.222671] env[62923]: DEBUG oslo_concurrency.lockutils [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Acquiring lock "6cf594e3-e4a6-45f5-b8d2-06db1c200042-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.222846] env[62923]: DEBUG oslo_concurrency.lockutils [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Lock "6cf594e3-e4a6-45f5-b8d2-06db1c200042-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.223059] env[62923]: DEBUG oslo_concurrency.lockutils [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Lock "6cf594e3-e4a6-45f5-b8d2-06db1c200042-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.225178] env[62923]: INFO nova.compute.manager [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Terminating instance [ 852.226917] env[62923]: DEBUG nova.compute.manager [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 852.227110] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 852.227847] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b23e9a-623e-4ed4-b072-b7b0f6972969 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.236184] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 852.236670] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61336eef-3bdf-4067-bf62-dc35148a8dcb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.240793] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369956, 'name': Rename_Task, 'duration_secs': 0.234622} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.241031] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 852.241237] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8aa2303-8639-4e22-a21a-ab8832b5058c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.243630] env[62923]: DEBUG oslo_vmware.api [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Waiting for the task: (returnval){ [ 852.243630] env[62923]: value = "task-1369957" [ 852.243630] env[62923]: _type = "Task" [ 852.243630] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.248266] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 852.248266] env[62923]: value = "task-1369958" [ 852.248266] env[62923]: _type = "Task" [ 852.248266] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.254663] env[62923]: DEBUG oslo_vmware.api [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369957, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.261018] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369958, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.370897] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.491278] env[62923]: DEBUG nova.scheduler.client.report [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 852.506687] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521a7657-486a-9fcf-f68f-8c78e6d771d0, 'name': SearchDatastore_Task, 'duration_secs': 0.030937} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.507549] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a2fbe55-ac1c-49e6-9e93-7f0f4f7195a2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.513494] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 852.513494] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e246d9-896d-1794-a665-811e3deaa241" [ 852.513494] env[62923]: _type = "Task" [ 852.513494] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.521324] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e246d9-896d-1794-a665-811e3deaa241, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.668469] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52abfb0b-857e-b5e6-b68a-a2630f05b2e3, 'name': SearchDatastore_Task, 'duration_secs': 0.018548} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.668770] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.669056] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 7c3edceb-cc58-4925-a97a-3204936c836d/7c3edceb-cc58-4925-a97a-3204936c836d.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 852.669422] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cfd5f9e7-01b6-4c8e-adb1-20c2c7840027 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.678622] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 852.678622] env[62923]: value = "task-1369959" [ 852.678622] env[62923]: _type = "Task" [ 852.678622] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.686902] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369959, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.721618] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5265a8bf-25cf-3331-1a4e-3f570c877bcc, 'name': SearchDatastore_Task, 'duration_secs': 0.028552} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.721930] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.722339] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 852.722579] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.722724] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.723042] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 852.723621] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-697c84ab-bc47-4072-a521-f55cdfa1b76b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.732354] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 852.732537] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 852.733354] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa1aa466-81d6-4f7b-a826-75177b34ef8d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.738848] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 852.738848] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522fbbdd-9c99-6098-bae6-f1fc830396b8" [ 852.738848] env[62923]: _type = "Task" [ 852.738848] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.746699] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522fbbdd-9c99-6098-bae6-f1fc830396b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.757512] env[62923]: DEBUG oslo_vmware.api [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369957, 'name': PowerOffVM_Task, 'duration_secs': 0.24585} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.760638] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 852.760804] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 852.761073] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369958, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.761592] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b20f5d91-cec0-4b0e-b932-21a445f83558 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.820220] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 852.820952] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 852.820952] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Deleting the datastore file [datastore1] 6cf594e3-e4a6-45f5-b8d2-06db1c200042 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 852.820952] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1ecabb6-a1b4-4d37-ab82-6c6d95e43087 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.827839] env[62923]: DEBUG oslo_vmware.api [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Waiting for the task: (returnval){ [ 852.827839] env[62923]: value = "task-1369961" [ 852.827839] env[62923]: _type = "Task" [ 852.827839] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.836121] env[62923]: DEBUG oslo_vmware.api [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369961, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.996559] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.997093] env[62923]: DEBUG nova.compute.manager [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 853.000034] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.857s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.001471] env[62923]: INFO nova.compute.claims [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 853.027151] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e246d9-896d-1794-a665-811e3deaa241, 'name': SearchDatastore_Task, 'duration_secs': 0.015808} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.027497] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.027761] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 92c59517-7e6f-45bd-8211-789a718d66d1/92c59517-7e6f-45bd-8211-789a718d66d1.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 853.028411] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3f41d9f-0853-4094-a7cb-d1dd78149528 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.038871] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 853.038871] env[62923]: value = "task-1369962" [ 853.038871] env[62923]: _type = "Task" [ 853.038871] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.054669] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369962, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.190577] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369959, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.250410] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522fbbdd-9c99-6098-bae6-f1fc830396b8, 'name': SearchDatastore_Task, 'duration_secs': 0.008088} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.254262] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-481cecb5-28a6-4060-8685-eadcf72a22e5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.262444] env[62923]: DEBUG oslo_vmware.api [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369958, 'name': PowerOnVM_Task, 'duration_secs': 0.616207} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.263628] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 853.263830] env[62923]: INFO nova.compute.manager [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Took 9.53 seconds to spawn the instance on the hypervisor. [ 853.264026] env[62923]: DEBUG nova.compute.manager [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 853.264346] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 853.264346] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52090437-d8c0-1743-7299-ce97c1c3767d" [ 853.264346] env[62923]: _type = "Task" [ 853.264346] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.265093] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194702d9-9e9b-4b8d-bbff-b4c0e80e6040 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.279717] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52090437-d8c0-1743-7299-ce97c1c3767d, 'name': SearchDatastore_Task, 'duration_secs': 0.009781} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.280501] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.280501] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 880cce70-5a0c-40a6-91b5-73d074feab6f/880cce70-5a0c-40a6-91b5-73d074feab6f.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 853.280704] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f70c3f6b-d259-4a54-9725-47f582faa535 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.286217] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 853.286217] env[62923]: value = "task-1369963" [ 853.286217] env[62923]: _type = "Task" [ 853.286217] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.293705] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369963, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.337357] env[62923]: DEBUG oslo_vmware.api [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Task: {'id': task-1369961, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.43135} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.337611] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 853.337904] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 853.338009] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 853.338144] env[62923]: INFO nova.compute.manager [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Took 1.11 seconds to destroy the instance on the hypervisor. [ 853.338392] env[62923]: DEBUG oslo.service.loopingcall [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 853.338602] env[62923]: DEBUG nova.compute.manager [-] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 853.338671] env[62923]: DEBUG nova.network.neutron [-] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 853.506476] env[62923]: DEBUG nova.compute.utils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 853.510053] env[62923]: DEBUG nova.compute.manager [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Not allocating networking since 'none' was specified. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 853.548423] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369962, 'name': CopyVirtualDisk_Task} progress is 51%.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.698428] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369959, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52401} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.698785] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 7c3edceb-cc58-4925-a97a-3204936c836d/7c3edceb-cc58-4925-a97a-3204936c836d.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 853.699044] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 853.699369] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7ccee41-aca1-4971-8548-432393e9fcfd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.712197] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 853.712197] env[62923]: value = "task-1369964" [ 853.712197] env[62923]: _type = "Task" [ 853.712197] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.723764] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369964, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.796103] env[62923]: INFO nova.compute.manager [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Took 27.16 seconds to build instance. [ 853.803803] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369963, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.808112] env[62923]: DEBUG nova.compute.manager [req-4dd4a125-1a95-4a10-bcbc-1d90e7857be0 req-373efce2-592e-4637-9b57-89793512b61c service nova] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Received event network-vif-deleted-7e696989-aebf-4820-8c47-eadc1b780de7 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.808378] env[62923]: INFO nova.compute.manager [req-4dd4a125-1a95-4a10-bcbc-1d90e7857be0 req-373efce2-592e-4637-9b57-89793512b61c service nova] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Neutron deleted interface 7e696989-aebf-4820-8c47-eadc1b780de7; detaching it from the instance and deleting it from the info cache [ 853.808715] env[62923]: DEBUG nova.network.neutron [req-4dd4a125-1a95-4a10-bcbc-1d90e7857be0 req-373efce2-592e-4637-9b57-89793512b61c service nova] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.011771] env[62923]: DEBUG nova.compute.manager [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 854.047560] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369962, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631281} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.048524] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 92c59517-7e6f-45bd-8211-789a718d66d1/92c59517-7e6f-45bd-8211-789a718d66d1.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 854.048750] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 854.049014] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c06b05fd-09e0-489a-85a8-db8450e882ca {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.057491] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 854.057491] env[62923]: value = "task-1369965" [ 854.057491] env[62923]: _type = "Task" [ 854.057491] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.069342] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369965, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.221516] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369964, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090368} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.224380] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 854.225522] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b2f091-9c64-4388-9cab-923b302abd5f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.248034] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 7c3edceb-cc58-4925-a97a-3204936c836d/7c3edceb-cc58-4925-a97a-3204936c836d.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 854.249203] env[62923]: DEBUG nova.network.neutron [-] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.250309] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1be84b83-ed00-43b1-a5bc-9416ac0298db {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.264992] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389c3fe4-256a-4f6e-8d99-7ce4be7e9b80 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.275972] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da6e667-c35a-4cb3-b09d-9bf34570ab69 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.279479] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 854.279479] env[62923]: value = "task-1369966" [ 854.279479] env[62923]: _type = "Task" [ 854.279479] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.308514] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1313efb9-3f43-4f07-8773-6b3cd8614f42 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "a616c7f0-8c39-4c08-a1a4-1d89e158d3c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 84.945s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.314266] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5699d188-3e39-47bf-bf66-300909673029 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.322196] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369966, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.322988] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-915f2d5f-27f4-4ed3-8344-26713ddd8880 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.332301] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369963, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.74628} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.333372] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 880cce70-5a0c-40a6-91b5-73d074feab6f/880cce70-5a0c-40a6-91b5-73d074feab6f.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 854.333659] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 854.336637] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54030190-4d13-4973-9e4d-4fe3701a4844 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.340428] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb78968-4d5a-4dbf-a0ea-6354d78278d3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.354912] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e865a72-b4d7-4b3c-9df9-d4f32bb65fa9 {{(pid=62923) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.370060] env[62923]: DEBUG nova.compute.provider_tree [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.378501] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 854.378501] env[62923]: value = "task-1369967" [ 854.378501] env[62923]: _type = "Task" [ 854.378501] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.378501] env[62923]: DEBUG nova.compute.manager [req-4dd4a125-1a95-4a10-bcbc-1d90e7857be0 req-373efce2-592e-4637-9b57-89793512b61c service nova] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Detach interface failed, port_id=7e696989-aebf-4820-8c47-eadc1b780de7, reason: Instance 6cf594e3-e4a6-45f5-b8d2-06db1c200042 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 854.379699] env[62923]: DEBUG nova.scheduler.client.report [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 854.391588] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369967, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.567572] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369965, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064269} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.567857] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 854.568700] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7faff265-4026-4058-8d51-c042abdbdd6a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.588907] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 92c59517-7e6f-45bd-8211-789a718d66d1/92c59517-7e6f-45bd-8211-789a718d66d1.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 854.589509] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42566870-69f4-42f5-8da4-4ebe7c6dee08 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.609073] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 854.609073] env[62923]: value = "task-1369968" [ 854.609073] env[62923]: _type = "Task" [ 854.609073] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.617873] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369968, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.768765] env[62923]: INFO nova.compute.manager [-] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Took 1.43 seconds to deallocate network for instance. [ 854.792018] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369966, 'name': ReconfigVM_Task, 'duration_secs': 0.305452} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.792018] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 7c3edceb-cc58-4925-a97a-3204936c836d/7c3edceb-cc58-4925-a97a-3204936c836d.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 854.792018] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5793f4be-3ea6-4a49-907e-5e2e3fd006a0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.799020] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 854.799020] env[62923]: value = "task-1369969" [ 854.799020] env[62923]: _type = "Task" [ 854.799020] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.807604] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369969, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.824401] env[62923]: DEBUG nova.compute.manager [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 854.886567] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.884s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.886567] env[62923]: DEBUG nova.compute.manager [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 854.892633] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.748s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.895675] env[62923]: INFO nova.compute.claims [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 854.904466] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369967, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.164444} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.904954] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 854.906466] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99206b71-1068-46a8-a7cd-8774acb95d09 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.934857] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 880cce70-5a0c-40a6-91b5-73d074feab6f/880cce70-5a0c-40a6-91b5-73d074feab6f.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 854.936105] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ff07153-efd8-4613-8c66-adf9f5d7e26e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.956642] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 854.956642] env[62923]: value = "task-1369970" [ 854.956642] env[62923]: _type = "Task" [ 854.956642] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.964963] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369970, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.023397] env[62923]: DEBUG nova.compute.manager [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 855.048639] env[62923]: DEBUG nova.virt.hardware [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 855.048885] env[62923]: DEBUG nova.virt.hardware [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 855.049061] env[62923]: DEBUG nova.virt.hardware [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 855.049260] env[62923]: DEBUG nova.virt.hardware [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 855.049405] env[62923]: DEBUG nova.virt.hardware [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 855.049549] env[62923]: DEBUG nova.virt.hardware [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 855.049750] env[62923]: DEBUG nova.virt.hardware [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 855.049906] env[62923]: DEBUG 
nova.virt.hardware [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 855.050338] env[62923]: DEBUG nova.virt.hardware [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 855.050593] env[62923]: DEBUG nova.virt.hardware [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 855.050831] env[62923]: DEBUG nova.virt.hardware [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 855.051749] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8ef121-1b94-47ab-99bc-20eeae7f89c7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.059466] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c911b2f0-7d16-46cb-8aa6-28083df48995 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.075360] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Instance VIF info [] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 855.082720] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Creating folder: Project (e01f5bb435064648ac4468b327a68430). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 855.083202] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b24e4949-4228-40b7-9978-ad26240ebb04 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.094679] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Created folder: Project (e01f5bb435064648ac4468b327a68430) in parent group-v291405. [ 855.095158] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Creating folder: Instances. Parent ref: group-v291449. 
{{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 855.095496] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84f0aa95-6117-41d7-b62e-19eb38520b79 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.105235] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Created folder: Instances in parent group-v291449. [ 855.106351] env[62923]: DEBUG oslo.service.loopingcall [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.106648] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 855.106954] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-595909f0-3a5b-46d4-bb2e-330031fc8937 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.128857] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369968, 'name': ReconfigVM_Task, 'duration_secs': 0.477311} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.130832] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 92c59517-7e6f-45bd-8211-789a718d66d1/92c59517-7e6f-45bd-8211-789a718d66d1.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 855.131529] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 855.131529] env[62923]: value = "task-1369973" [ 855.131529] env[62923]: _type = "Task" [ 855.131529] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.131826] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-27bda7c6-c504-44eb-9668-a0e8d3dd14f2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.141422] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 855.141422] env[62923]: value = "task-1369974" [ 855.141422] env[62923]: _type = "Task" [ 855.141422] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.145195] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369973, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 855.155094] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369974, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 855.259180] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93231f18-28db-4cea-bb5a-24f331df6247 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 855.268188] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a623319f-bbc7-43f8-9571-800de3927246 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Suspending the VM {{(pid=62923) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}}
[ 855.268474] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-46ed47de-7f6f-4901-8625-a46575706da5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 855.275022] env[62923]: DEBUG oslo_vmware.api [None req-a623319f-bbc7-43f8-9571-800de3927246 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){
[ 855.275022] env[62923]: value = "task-1369975"
[ 855.275022] env[62923]: _type = "Task"
[ 855.275022] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 855.275934] env[62923]: DEBUG oslo_concurrency.lockutils [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 855.285141] env[62923]: DEBUG oslo_vmware.api [None req-a623319f-bbc7-43f8-9571-800de3927246 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369975, 'name': SuspendVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 855.306483] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369969, 'name': Rename_Task, 'duration_secs': 0.170528} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 855.306762] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 855.307035] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-570f33ad-5326-41b2-9048-a5dd40e09918 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 855.313099] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){
[ 855.313099] env[62923]: value = "task-1369976"
[ 855.313099] env[62923]: _type = "Task"
[ 855.313099] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 855.320696] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369976, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 855.356021] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 855.401162] env[62923]: DEBUG nova.compute.utils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 855.405316] env[62923]: DEBUG nova.compute.manager [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Not allocating networking since 'none' was specified. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}}
[ 855.467835] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369970, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 855.644399] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369973, 'name': CreateVM_Task, 'duration_secs': 0.318981} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 855.644551] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 855.645026] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 855.645206] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 855.646102] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 855.646102] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-910e130b-4fae-4bfa-9c16-d6e70d0a818f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 855.653224] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){
[ 855.653224] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52436a24-47c4-d833-69ec-c46ef7d81db5"
[ 855.653224] env[62923]: _type = "Task"
[ 855.653224] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 855.656388] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369974, 'name': Rename_Task, 'duration_secs': 0.145426} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
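The "Waiting for the task" / "progress is N%." / "completed successfully" triplets above are oslo.vmware's task-polling loop: every vSphere call that spawns a server-side task (SuspendVM_Task, PowerOnVM_Task, ...) returns a Task managed-object reference, and the session then polls its TaskInfo until it reaches 'success' or 'error'. A minimal Python sketch of that pattern, assuming placeholder connection details and a vm_ref obtained elsewhere (none of these values come from this log):

    # Sketch of the oslo.vmware polling pattern logged above.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'svc-user', 'secret',   # placeholders
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = ...  # VirtualMachine managed-object reference, looked up elsewhere

    # invoke_api() issues the SOAP request (logged as "Invoking
    # VirtualMachine.PowerOnVM_Task ...") and returns the Task moref;
    # wait_for_task() then polls TaskInfo, logging "progress is N%."
    # until the task succeeds (returning the final task info) or raising
    # the mapped exception on error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)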
[ 855.659525] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 855.659795] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe43d499-b04a-4517-b69b-f57f9cd38133 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 855.666124] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52436a24-47c4-d833-69ec-c46ef7d81db5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 855.667454] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){
[ 855.667454] env[62923]: value = "task-1369977"
[ 855.667454] env[62923]: _type = "Task"
[ 855.667454] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 855.675783] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369977, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 855.786891] env[62923]: DEBUG oslo_vmware.api [None req-a623319f-bbc7-43f8-9571-800de3927246 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369975, 'name': SuspendVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 855.823612] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369976, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 855.908422] env[62923]: DEBUG nova.compute.manager [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 855.968547] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369970, 'name': ReconfigVM_Task, 'duration_secs': 0.789363} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 855.971733] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 880cce70-5a0c-40a6-91b5-73d074feab6f/880cce70-5a0c-40a6-91b5-73d074feab6f.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 855.972664] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1f7bdc0-d623-4296-b38e-ef0f0d52ee91 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 855.979188] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){
[ 855.979188] env[62923]: value = "task-1369978"
[ 855.979188] env[62923]: _type = "Task"
[ 855.979188] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 855.990596] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369978, 'name': Rename_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 856.167210] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52436a24-47c4-d833-69ec-c46ef7d81db5, 'name': SearchDatastore_Task, 'duration_secs': 0.01318} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 856.168627] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 856.169011] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 856.169311] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 856.169561] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 856.169814] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 856.170988] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e01945-2ff1-4cb5-9a32-615bad048b75 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.174056] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c83fc87-cc0a-41da-afe3-c344a6d35269 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.190020] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14faa619-8d29-4bc4-84b9-c68ad8d409f6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.193988] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369977, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 856.199858] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 856.199858] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 856.199858] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4935e8ec-d2f9-42b7-9bcc-fa656bb348d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.230555] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11215a47-99f3-4151-8e29-276c098159bb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.234806] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){
[ 856.234806] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52acb6bc-58c2-cddf-145f-a103dfa9931d"
[ 856.234806] env[62923]: _type = "Task"
[ 856.234806] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 856.242426] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6e1813-214f-458e-9122-72640a5b37c2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.250320] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52acb6bc-58c2-cddf-145f-a103dfa9931d, 'name': SearchDatastore_Task, 'duration_secs': 0.008638} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
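The lockutils entries here and throughout come from oslo.concurrency, which logs "Acquiring" / "acquired :: waited Ns" on entry and ""released" :: held Ns" on exit of every named lock. The "compute_resources" lock is taken via the decorator form (source location inner in lockutils.py), while the per-image datastore cache paths use the context-manager form (source location lock) with an external file lock, so separate worker processes on the same host also serialize. A rough sketch under those assumptions; the lock names mirror the log, everything else is illustrative:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources', fair=True)
    def update_usage():
        ...  # critical section; wait/hold durations are logged automatically

    def fetch_image_if_missing(image_id):
        # external=True additionally takes a file lock under lock_path,
        # which is what the "Acquired external semaphore ..." entries show.
        with lockutils.lock('[datastore2] devstack-image-cache_base/%s' % image_id,
                            external=True, lock_path='/tmp'):
            ...  # check the per-image cache, download/copy only if missing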
[ 856.251435] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbd7f182-465d-4f82-b1c4-4e688b8b53e1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.262803] env[62923]: DEBUG nova.compute.provider_tree [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 856.267265] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){
[ 856.267265] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a6a44c-0f01-cd5c-1e9c-6ee0f10d9f88"
[ 856.267265] env[62923]: _type = "Task"
[ 856.267265] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 856.275194] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a6a44c-0f01-cd5c-1e9c-6ee0f10d9f88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 856.288558] env[62923]: DEBUG oslo_vmware.api [None req-a623319f-bbc7-43f8-9571-800de3927246 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369975, 'name': SuspendVM_Task, 'duration_secs': 0.63464} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 856.288811] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a623319f-bbc7-43f8-9571-800de3927246 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Suspended the VM {{(pid=62923) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}}
[ 856.288987] env[62923]: DEBUG nova.compute.manager [None req-a623319f-bbc7-43f8-9571-800de3927246 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 856.290474] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97015b9f-2738-43e3-8f47-de98b91af421 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.324257] env[62923]: DEBUG oslo_vmware.api [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369976, 'name': PowerOnVM_Task, 'duration_secs': 0.638547} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 856.324577] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 856.325105] env[62923]: INFO nova.compute.manager [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Took 10.24 seconds to spawn the instance on the hypervisor.
[ 856.325305] env[62923]: DEBUG nova.compute.manager [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 856.326307] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e291cd69-ea75-4353-a6d4-90b3053732f0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.491364] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369978, 'name': Rename_Task, 'duration_secs': 0.146936} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 856.491636] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 856.491872] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5279fb59-05eb-4d18-82e4-53c572a71482 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.499673] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){
[ 856.499673] env[62923]: value = "task-1369979"
[ 856.499673] env[62923]: _type = "Task"
[ 856.499673] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 856.507261] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369979, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 856.679718] env[62923]: DEBUG oslo_vmware.api [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369977, 'name': PowerOnVM_Task, 'duration_secs': 0.545619} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 856.679994] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 856.680247] env[62923]: INFO nova.compute.manager [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Took 5.84 seconds to spawn the instance on the hypervisor.
[ 856.680442] env[62923]: DEBUG nova.compute.manager [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 856.682214] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fc167e-6c11-4761-a180-06573291c2d6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.767105] env[62923]: DEBUG nova.scheduler.client.report [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 856.779032] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a6a44c-0f01-cd5c-1e9c-6ee0f10d9f88, 'name': SearchDatastore_Task, 'duration_secs': 0.00849} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 856.779364] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 856.779497] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 94d2670f-d858-437a-a166-d148a57e07ab/94d2670f-d858-437a-a166-d148a57e07ab.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 856.779740] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a23113a3-f565-4322-b71b-ca4aac6f6bd1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.787338] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){
[ 856.787338] env[62923]: value = "task-1369980"
[ 856.787338] env[62923]: _type = "Task"
[ 856.787338] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 856.795395] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369980, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 856.846944] env[62923]: INFO nova.compute.manager [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Took 24.54 seconds to build instance.
[ 856.919857] env[62923]: DEBUG nova.compute.manager [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
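Task-1369980 above is the cache-to-instance copy: the cached sparse base image is copied into the new instance's datastore directory, and once the copy completes the driver grows the copy to the flavor's root disk size (the "Extending root virtual disk to 1048576" entry further below is in KB, i.e. 1 GiB, consistent with a root_gb=1 flavor such as the m1.nano logged just after this point). A rough sketch of that copy-then-extend step, reusing an oslo.vmware session as in the earlier snippet; dc_ref and the path arguments are placeholders, and the argument plumbing is simplified:

    def copy_and_extend(session, dc_ref, cached_vmdk, instance_vmdk, root_gb):
        vdm = session.vim.service_content.virtualDiskManager
        # VirtualDiskManager.CopyVirtualDisk_Task: image cache -> instance dir
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', vdm,
            sourceName=cached_vmdk, sourceDatacenter=dc_ref,
            destName=instance_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(task)
        # VirtualDiskManager.ExtendVirtualDisk_Task: grow to flavor root size;
        # newCapacityKb is why the log reports the target size in KB (1048576).
        task = session.invoke_api(
            session.vim, 'ExtendVirtualDisk_Task', vdm,
            name=instance_vmdk, datacenter=dc_ref,
            newCapacityKb=root_gb * 1024 * 1024, eagerZero=False)
        session.wait_for_task(task)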
[ 856.944806] env[62923]: DEBUG nova.virt.hardware [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 856.945087] env[62923]: DEBUG nova.virt.hardware [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 856.945271] env[62923]: DEBUG nova.virt.hardware [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 856.945459] env[62923]: DEBUG nova.virt.hardware [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 856.945602] env[62923]: DEBUG nova.virt.hardware [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 856.945745] env[62923]: DEBUG nova.virt.hardware [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 856.945951] env[62923]: DEBUG nova.virt.hardware [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 856.946119] env[62923]: DEBUG nova.virt.hardware [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 856.946307] env[62923]: DEBUG nova.virt.hardware [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 856.946465] env[62923]: DEBUG nova.virt.hardware [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 856.946635] env[62923]: DEBUG nova.virt.hardware [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 856.947661] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff98d884-465f-4634-b11a-7c2accebb73b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.955774] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fcce2d-27fe-4cae-b84e-aa3cbf468ed8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.969670] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Instance VIF info [] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 856.975301] env[62923]: DEBUG oslo.service.loopingcall [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 856.975539] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 856.975754] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6acf778a-835a-4af8-9bf2-f46228f85dd1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 856.992693] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 856.992693] env[62923]: value = "task-1369981"
[ 856.992693] env[62923]: _type = "Task"
[ 856.992693] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 857.001505] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369981, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 857.011064] env[62923]: DEBUG oslo_vmware.api [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369979, 'name': PowerOnVM_Task, 'duration_secs': 0.427337} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 857.011308] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 857.011496] env[62923]: INFO nova.compute.manager [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Took 8.53 seconds to spawn the instance on the hypervisor.
[ 857.012085] env[62923]: DEBUG nova.compute.manager [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 857.012415] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6eaaa7-1269-4268-bed3-1bcb3ea76d48 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.198027] env[62923]: INFO nova.compute.manager [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Took 23.83 seconds to build instance.
[ 857.273872] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.381s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 857.274875] env[62923]: DEBUG nova.compute.manager [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 857.280588] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.837s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 857.280908] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 857.281112] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62923) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 857.281493] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.341s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 857.281733] env[62923]: DEBUG nova.objects.instance [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Lazy-loading 'resources' on Instance uuid 6fa4d8a8-093f-4ae8-9148-f15f5bf98944 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 857.288470] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ecd127-afc2-495a-b9b8-0f7016735f2e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.301810] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Acquiring lock "e6752138-5d66-469d-ac56-6bd169ad166e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 857.302125] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Lock "e6752138-5d66-469d-ac56-6bd169ad166e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 857.315454] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870c7a17-18e6-49c9-912b-847f2194a394 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.320886] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369980, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 857.338632] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04042aef-beb9-4200-94d3-4323fe611459 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.349397] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb711a3d-2b9e-4c86-b3fd-4bf604ec0e5b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.353449] env[62923]: DEBUG oslo_concurrency.lockutils [None req-95313177-09c5-4822-9fb8-1ceae1a1d691 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "7c3edceb-cc58-4925-a97a-3204936c836d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 79.388s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 857.391055] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181441MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62923) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 857.391285] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 857.502075] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369981, 'name': CreateVM_Task, 'duration_secs': 0.423497} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 857.504502] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 857.505146] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 857.505548] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 857.505663] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 857.506136] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60df6fb7-1d46-4307-9e1b-27f9d164a132 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.510541] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){
[ 857.510541] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e1782b-b0b7-ac3d-46b2-98acde015c91"
[ 857.510541] env[62923]: _type = "Task"
[ 857.510541] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 857.519861] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e1782b-b0b7-ac3d-46b2-98acde015c91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 857.531702] env[62923]: INFO nova.compute.manager [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Took 24.79 seconds to build instance.
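The nova.virt.hardware entries above show the CPU-topology negotiation for the 1-vCPU m1.nano flavor: neither the flavor nor the image sets limits or preferences (0:0:0), so the limits fall back to 65536 per dimension, and the only sockets*cores*threads factorization of 1 vCPU is sockets=1, cores=1, threads=1, which is exactly the single VirtCPUTopology the log reports. A toy re-derivation of that enumeration (not nova's actual implementation):

    # Enumerate sockets*cores*threads factorizations of a vCPU count,
    # bounded by per-dimension limits (65536 here, as in the log).
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)]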
[ 857.584834] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d6acb7-1bc7-4893-82df-9847134b1981 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.592746] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b72253-ea0e-43fd-8024-c8c5a1739b05 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.623115] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35ac0dd-fd9c-4b28-b29d-535e5b854cd3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.631205] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea645a2-98fb-46ce-ade6-980dd060d46b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.645421] env[62923]: DEBUG nova.compute.provider_tree [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 857.701102] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2017a4b0-70b5-4840-8540-5e710b1084a0 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Lock "92c59517-7e6f-45bd-8211-789a718d66d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 72.328s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 857.787151] env[62923]: DEBUG nova.compute.utils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 857.788589] env[62923]: DEBUG nova.compute.manager [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 857.788753] env[62923]: DEBUG nova.network.neutron [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 857.811340] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369980, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.667886} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 857.811641] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 94d2670f-d858-437a-a166-d148a57e07ab/94d2670f-d858-437a-a166-d148a57e07ab.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 857.811857] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 857.812118] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c15b542-45b7-4309-9909-6c100110fdf6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 857.818829] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){
[ 857.818829] env[62923]: value = "task-1369982"
[ 857.818829] env[62923]: _type = "Task"
[ 857.818829] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 857.828327] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369982, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 857.847955] env[62923]: DEBUG nova.policy [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d2829faa8f74da8a1432abd0c2434f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76d290a91b3b4d9491f755fd3d7e7894', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}}
[ 857.858070] env[62923]: DEBUG nova.compute.manager [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 858.022405] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e1782b-b0b7-ac3d-46b2-98acde015c91, 'name': SearchDatastore_Task, 'duration_secs': 0.067822} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 858.022768] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 858.023080] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 858.023435] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 858.026435] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 858.026435] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 858.026435] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6de9b65f-ea23-40b7-818b-d3e856a293c5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 858.033804] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82ffdaf8-9cc6-4c7a-817c-5cd69d12379b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "880cce70-5a0c-40a6-91b5-73d074feab6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 74.434s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 858.038548] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 858.039108] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 858.039897] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a8e0cb9-2412-411a-acc9-9c0be8b4ff00 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 858.045270] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){
[ 858.045270] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52588f24-de6e-3807-ed54-e3f3ec17d119"
[ 858.045270] env[62923]: _type = "Task"
[ 858.045270] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 858.053299] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52588f24-de6e-3807-ed54-e3f3ec17d119, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 858.149280] env[62923]: DEBUG nova.scheduler.client.report [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 858.176400] env[62923]: DEBUG nova.network.neutron [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Successfully created port: 4ba4e87a-6f39-4b74-87b4-12b093d28f4a {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 858.291971] env[62923]: DEBUG nova.compute.manager [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 858.330145] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369982, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074636} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 858.331490] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 858.332343] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f7a36d4-0344-4919-9eab-07a677877740 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 858.353265] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 94d2670f-d858-437a-a166-d148a57e07ab/94d2670f-d858-437a-a166-d148a57e07ab.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 858.353774] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef853149-8216-49e8-bd09-52a055c25187 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 858.378756] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){
[ 858.378756] env[62923]: value = "task-1369983"
[ 858.378756] env[62923]: _type = "Task"
[ 858.378756] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 858.388895] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369983, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 858.389817] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 858.557030] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52588f24-de6e-3807-ed54-e3f3ec17d119, 'name': SearchDatastore_Task, 'duration_secs': 0.03405} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 858.557992] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f67e32f-e863-42e5-b1ce-d303aa5ad97f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 858.563765] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){
[ 858.563765] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521b6236-7a3f-5320-8fc4-cadf6e2871e4"
[ 858.563765] env[62923]: _type = "Task"
[ 858.563765] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 858.571531] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521b6236-7a3f-5320-8fc4-cadf6e2871e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 858.658542] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.375s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 858.660757] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.464s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 858.662433] env[62923]: INFO nova.compute.claims [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 858.668064] env[62923]: DEBUG nova.compute.manager [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 858.668064] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d014dbad-0d5a-4e2e-9f24-25e01f158d1a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 858.673644] env[62923]: INFO nova.compute.manager [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Rebuilding instance
[ 858.685457] env[62923]: INFO nova.scheduler.client.report [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061
tempest-ServerPasswordTestJSON-69700061-project-member] Deleted allocations for instance 6fa4d8a8-093f-4ae8-9148-f15f5bf98944 [ 858.725983] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "7c3edceb-cc58-4925-a97a-3204936c836d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.726251] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "7c3edceb-cc58-4925-a97a-3204936c836d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.726569] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "7c3edceb-cc58-4925-a97a-3204936c836d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.726850] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "7c3edceb-cc58-4925-a97a-3204936c836d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.727230] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "7c3edceb-cc58-4925-a97a-3204936c836d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.728836] env[62923]: DEBUG nova.compute.manager [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 858.729536] env[62923]: DEBUG oslo_concurrency.lockutils [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "880cce70-5a0c-40a6-91b5-73d074feab6f" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.729536] env[62923]: DEBUG oslo_concurrency.lockutils [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "880cce70-5a0c-40a6-91b5-73d074feab6f" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.000s {{(pid=62923) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.729775] env[62923]: INFO nova.compute.manager [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Shelving [ 858.735018] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83e5d96-ab00-42c5-8589-522ebea629a5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.736773] env[62923]: INFO nova.compute.manager [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Terminating instance [ 858.739508] env[62923]: DEBUG nova.compute.manager [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 858.739825] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 858.741555] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb7d45a-c908-48ca-afc2-0c4a902a425e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.758362] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 858.758837] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-520e2f31-95fd-4acd-99f5-e55d7982215b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.767036] env[62923]: DEBUG oslo_vmware.api [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 858.767036] env[62923]: value = "task-1369984" [ 858.767036] env[62923]: _type = "Task" [ 858.767036] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.775736] env[62923]: DEBUG oslo_vmware.api [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369984, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.888830] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369983, 'name': ReconfigVM_Task, 'duration_secs': 0.272575} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.889124] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 94d2670f-d858-437a-a166-d148a57e07ab/94d2670f-d858-437a-a166-d148a57e07ab.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.889828] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11a4fec5-8424-4e83-9335-8bc9b463259d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.896457] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 858.896457] env[62923]: value = "task-1369985" [ 858.896457] env[62923]: _type = "Task" [ 858.896457] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.904989] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369985, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.075815] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521b6236-7a3f-5320-8fc4-cadf6e2871e4, 'name': SearchDatastore_Task, 'duration_secs': 0.010496} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.076116] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.076374] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 67a83e64-c8bd-499c-895a-11976d69195b/67a83e64-c8bd-499c-895a-11976d69195b.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 859.076624] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4d95868-a4c0-4fdb-acf7-55ffbbb00669 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.083148] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 859.083148] env[62923]: value = "task-1369986" [ 859.083148] env[62923]: _type = "Task" [ 859.083148] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.090881] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369986, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.180213] env[62923]: INFO nova.compute.manager [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] instance snapshotting [ 859.180453] env[62923]: WARNING nova.compute.manager [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 859.183044] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac739de-33c7-46fe-b980-e67e5e519dce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.205221] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a44309ff-258b-460c-b104-aa88c2c06b52 tempest-ServerPasswordTestJSON-69700061 tempest-ServerPasswordTestJSON-69700061-project-member] Lock "6fa4d8a8-093f-4ae8-9148-f15f5bf98944" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 23.250s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.206467] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec9b3a1-1df7-4023-bbc0-b61381df5e02 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.244350] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 859.244604] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05023908-8e5e-419f-a8cb-841d8fed1c82 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.252201] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 859.252201] env[62923]: value = "task-1369987" [ 859.252201] env[62923]: _type = "Task" [ 859.252201] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.262336] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 859.262603] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369987, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.263049] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c406eece-0bae-486b-9eb9-03ee9a0ed2af {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.272601] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 859.272601] env[62923]: value = "task-1369988" [ 859.272601] env[62923]: _type = "Task" [ 859.272601] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.280259] env[62923]: DEBUG oslo_vmware.api [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369984, 'name': PowerOffVM_Task, 'duration_secs': 0.238478} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.280259] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 859.280259] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 859.280259] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c36f3d9d-db48-4861-ab4c-fb72465637ab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.284746] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369988, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.301370] env[62923]: DEBUG nova.compute.manager [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 859.338702] env[62923]: DEBUG nova.virt.hardware [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 859.338956] env[62923]: DEBUG nova.virt.hardware [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 859.339199] env[62923]: DEBUG nova.virt.hardware [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 859.339293] env[62923]: DEBUG nova.virt.hardware [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 859.339413] env[62923]: DEBUG nova.virt.hardware [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 859.339565] env[62923]: DEBUG nova.virt.hardware [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 859.339770] env[62923]: DEBUG nova.virt.hardware [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 859.339927] env[62923]: DEBUG nova.virt.hardware [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 859.340112] env[62923]: DEBUG 
nova.virt.hardware [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 859.340274] env[62923]: DEBUG nova.virt.hardware [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 859.340447] env[62923]: DEBUG nova.virt.hardware [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 859.341695] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e1da6f-0292-40bd-a87a-4538d5b55aea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.350694] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170f63f7-a891-4e5d-81eb-4df795827c03 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.407460] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369985, 'name': Rename_Task, 'duration_secs': 0.143759} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.407754] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.408021] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-055923ab-2890-4413-9881-1f5e21cdaa3d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.416019] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 859.416019] env[62923]: value = "task-1369990" [ 859.416019] env[62923]: _type = "Task" [ 859.416019] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.425866] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369990, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.470549] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 859.470779] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 859.470957] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleting the datastore file [datastore1] 7c3edceb-cc58-4925-a97a-3204936c836d {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 859.471245] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac1bdeff-ca10-4018-b6ee-663e0e293534 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.478436] env[62923]: DEBUG oslo_vmware.api [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 859.478436] env[62923]: value = "task-1369991" [ 859.478436] env[62923]: _type = "Task" [ 859.478436] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.486952] env[62923]: DEBUG oslo_vmware.api [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369991, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.593610] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369986, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.717102] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Creating Snapshot of the VM instance {{(pid=62923) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 859.717377] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-94d1821b-619b-407a-88bb-78ecd174ea8b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.725844] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 859.725844] env[62923]: value = "task-1369992" [ 859.725844] env[62923]: _type = "Task" [ 859.725844] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.737476] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369992, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.763243] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369987, 'name': PowerOffVM_Task, 'duration_secs': 0.198857} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.766092] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 859.767530] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6fcbaed-2259-4127-bd40-57f988c35d2b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.799829] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb64e19-558e-4292-9f15-bd480e546ff4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.812867] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369988, 'name': PowerOffVM_Task, 'duration_secs': 0.122421} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.813424] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 859.813649] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 859.814743] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cc6f20-bc46-450e-9c38-0e3238d69e8f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.824561] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 859.824561] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-175bb35a-78a5-499c-bfe6-96c02598dd63 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.847587] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 859.847881] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 859.848138] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Deleting the datastore file [datastore2] 92c59517-7e6f-45bd-8211-789a718d66d1 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 859.851181] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7bcf9d0-a255-4cc1-a406-d67c1c86936a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.858855] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 859.858855] env[62923]: value = "task-1369994" [ 859.858855] env[62923]: _type = "Task" [ 859.858855] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.868620] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369994, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.930728] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369990, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.955061] env[62923]: DEBUG nova.compute.manager [req-8bd18cfa-e321-4e4d-8d06-5a4b5b2a5993 req-bcd72969-8d1f-4d20-ae35-0f0ded74fe36 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Received event network-vif-plugged-4ba4e87a-6f39-4b74-87b4-12b093d28f4a {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 859.955323] env[62923]: DEBUG oslo_concurrency.lockutils [req-8bd18cfa-e321-4e4d-8d06-5a4b5b2a5993 req-bcd72969-8d1f-4d20-ae35-0f0ded74fe36 service nova] Acquiring lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.955866] env[62923]: DEBUG oslo_concurrency.lockutils [req-8bd18cfa-e321-4e4d-8d06-5a4b5b2a5993 req-bcd72969-8d1f-4d20-ae35-0f0ded74fe36 service nova] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.955866] env[62923]: DEBUG oslo_concurrency.lockutils [req-8bd18cfa-e321-4e4d-8d06-5a4b5b2a5993 req-bcd72969-8d1f-4d20-ae35-0f0ded74fe36 service nova] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.955866] env[62923]: DEBUG nova.compute.manager [req-8bd18cfa-e321-4e4d-8d06-5a4b5b2a5993 req-bcd72969-8d1f-4d20-ae35-0f0ded74fe36 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] No waiting events found dispatching network-vif-plugged-4ba4e87a-6f39-4b74-87b4-12b093d28f4a {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 859.956242] env[62923]: WARNING nova.compute.manager [req-8bd18cfa-e321-4e4d-8d06-5a4b5b2a5993 req-bcd72969-8d1f-4d20-ae35-0f0ded74fe36 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Received unexpected event network-vif-plugged-4ba4e87a-6f39-4b74-87b4-12b093d28f4a for instance with vm_state building and task_state spawning. 
[ 859.959935] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbd0689-220f-4c8a-a088-32bb34cf4b57 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.974722] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374561a4-5573-40b4-8af0-a85c30bf6211 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.010598] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee467e8-ab10-4d1c-a279-9fd20a24a8f7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.016061] env[62923]: DEBUG oslo_vmware.api [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1369991, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33757} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.017252] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 860.017252] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 860.017252] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 860.017944] env[62923]: INFO nova.compute.manager [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Took 1.28 seconds to destroy the instance on the hypervisor. [ 860.018221] env[62923]: DEBUG oslo.service.loopingcall [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 860.020230] env[62923]: DEBUG nova.compute.manager [-] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 860.020333] env[62923]: DEBUG nova.network.neutron [-] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 860.023419] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45eed705-17e8-4c2b-bff5-45d5cc534418 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.038471] env[62923]: DEBUG nova.compute.provider_tree [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 860.095391] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369986, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.601455} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.095667] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 67a83e64-c8bd-499c-895a-11976d69195b/67a83e64-c8bd-499c-895a-11976d69195b.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 860.095924] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 860.096801] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66edff09-d273-4e86-957a-cae61a74de87 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.102963] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 860.102963] env[62923]: value = "task-1369995" [ 860.102963] env[62923]: _type = "Task" [ 860.102963] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.111826] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369995, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.112736] env[62923]: DEBUG nova.network.neutron [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Successfully updated port: 4ba4e87a-6f39-4b74-87b4-12b093d28f4a {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 860.240135] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369992, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.315358] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Creating Snapshot of the VM instance {{(pid=62923) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 860.315678] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8c1df444-70f2-4c90-b753-912e92012032 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.323026] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 860.323026] env[62923]: value = "task-1369996" [ 860.323026] env[62923]: _type = "Task" [ 860.323026] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.333229] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369996, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.369568] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1369994, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100575} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.369833] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 860.370029] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 860.370202] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 860.427464] env[62923]: DEBUG oslo_vmware.api [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369990, 'name': PowerOnVM_Task, 'duration_secs': 0.672218} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.428106] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.428719] env[62923]: INFO nova.compute.manager [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Took 5.40 seconds to spawn the instance on the hypervisor. [ 860.428719] env[62923]: DEBUG nova.compute.manager [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 860.429879] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78234d9-379b-49c8-bfc4-b0a627550320 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.560998] env[62923]: ERROR nova.scheduler.client.report [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [req-e94a71f7-a3b5-4d23-98b4-aaef61163e11] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e94a71f7-a3b5-4d23-98b4-aaef61163e11"}]} [ 860.582448] env[62923]: DEBUG nova.scheduler.client.report [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 860.599108] env[62923]: DEBUG nova.scheduler.client.report [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 860.599285] env[62923]: DEBUG nova.compute.provider_tree [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 860.611780] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369995, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071122} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.613041] env[62923]: DEBUG nova.scheduler.client.report [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 860.615104] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 860.616205] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6bd7fe-6559-49bd-85d8-6eafd4b1a8b8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.619509] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "refresh_cache-43065826-0f2b-48dc-bc42-8e0fd84fdcd3" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.619547] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired lock "refresh_cache-43065826-0f2b-48dc-bc42-8e0fd84fdcd3" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.619705] env[62923]: DEBUG nova.network.neutron [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 860.647329] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 67a83e64-c8bd-499c-895a-11976d69195b/67a83e64-c8bd-499c-895a-11976d69195b.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 860.649551] env[62923]: DEBUG nova.scheduler.client.report [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 860.656128] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2ced567-fba6-4ffb-8bf5-385a601e8928 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.677387] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 860.677387] env[62923]: value = "task-1369997" [ 860.677387] env[62923]: _type = "Task" [ 860.677387] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.686419] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369997, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.741358] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369992, 'name': CreateSnapshot_Task, 'duration_secs': 0.584565} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.741917] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Created Snapshot of the VM instance {{(pid=62923) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 860.743483] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-672e2dc0-97fa-42e4-9230-0dbfdf228009 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.835441] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369996, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.930426] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66179f7-34da-4040-8266-ea5a21e21a8e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.939325] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc8ea0b-0dbd-4b60-a446-4f380dcca466 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.951367] env[62923]: INFO nova.compute.manager [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Took 25.77 seconds to build instance. 
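The 409 at [ 860.560998] is Placement's optimistic concurrency control at work: every inventory write carries the resource provider generation, and if another writer bumped it first the PUT is rejected with code "placement.concurrent_update", after which the report client refreshes inventories, aggregates, and traits and retries (the "Refreshing inventories ... generation from 86 to 87" lines that follow). A minimal standalone sketch of that retry loop against the Placement HTTP API is below; it is not Nova's actual report client, and the PLACEMENT endpoint, token, and microversion header are illustrative assumptions.

```python
# Sketch of generation-guarded inventory updates against the Placement API,
# mirroring the 409/refresh/retry sequence in the log above. PLACEMENT and
# the auth token are placeholders, not values from this deployment.
import requests

PLACEMENT = "http://placement.example/placement"  # assumed endpoint
HEADERS = {"X-Auth-Token": "TOKEN",               # assumed credentials
           "OpenStack-API-Version": "placement 1.26"}

def get_generation(rp_uuid):
    r = requests.get(f"{PLACEMENT}/resource_providers/{rp_uuid}",
                     headers=HEADERS)
    r.raise_for_status()
    return r.json()["generation"]

def put_inventory(rp_uuid, inventories, retries=3):
    generation = get_generation(rp_uuid)
    for _ in range(retries):
        body = {"resource_provider_generation": generation,
                "inventories": inventories}
        r = requests.put(f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                         json=body, headers=HEADERS)
        if r.status_code != 409:
            r.raise_for_status()
            return r.json()
        errors = r.json().get("errors", [])
        if not any(e.get("code") == "placement.concurrent_update"
                   for e in errors):
            r.raise_for_status()
        # Another writer bumped the provider generation (e.g. a concurrent
        # allocation); refresh it and retry, as the report client does.
        generation = get_generation(rp_uuid)
    raise RuntimeError("gave up after repeated generation conflicts")
```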
[ 860.977604] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6965ec0f-78ed-46f8-bbff-645d4aaa5af9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.987722] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f921047e-aaba-439b-9cbe-bc9cdd0c68e9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.003712] env[62923]: DEBUG nova.compute.provider_tree [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 861.005547] env[62923]: DEBUG nova.network.neutron [-] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.162266] env[62923]: DEBUG nova.network.neutron [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 861.188236] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369997, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.266724] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Creating linked-clone VM from snapshot {{(pid=62923) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 861.268958] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0676bf53-0bef-4d33-93d6-32539b6cd526 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.278373] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 861.278373] env[62923]: value = "task-1369998" [ 861.278373] env[62923]: _type = "Task" [ 861.278373] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.286817] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369998, 'name': CloneVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.304842] env[62923]: DEBUG nova.network.neutron [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Updating instance_info_cache with network_info: [{"id": "4ba4e87a-6f39-4b74-87b4-12b093d28f4a", "address": "fa:16:3e:9a:a7:c3", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ba4e87a-6f", "ovs_interfaceid": "4ba4e87a-6f39-4b74-87b4-12b093d28f4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.335744] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1369996, 'name': CreateSnapshot_Task, 'duration_secs': 0.550043} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.336137] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Created Snapshot of the VM instance {{(pid=62923) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 861.337780] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b6cbe1-315d-4f52-952e-c41ab98f330b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.405639] env[62923]: DEBUG nova.virt.hardware [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 861.405730] env[62923]: DEBUG nova.virt.hardware [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 861.405876] env[62923]: DEBUG nova.virt.hardware [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 861.406090] env[62923]: DEBUG nova.virt.hardware [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 861.406270] env[62923]: DEBUG nova.virt.hardware [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 861.406389] env[62923]: DEBUG nova.virt.hardware [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 861.406590] env[62923]: DEBUG nova.virt.hardware [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 861.406745] env[62923]: DEBUG nova.virt.hardware [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 861.406907] env[62923]: DEBUG nova.virt.hardware [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 861.407256] env[62923]: DEBUG nova.virt.hardware [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 861.407465] env[62923]: DEBUG nova.virt.hardware [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 861.408645] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33dc54cd-71cb-4d43-9c95-94d6dc055998 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.416939] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a429bc91-472b-431f-8a80-a5c43ff38bd6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.430515] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Instance VIF info [] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 861.435963] env[62923]: DEBUG oslo.service.loopingcall [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
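The nova.virt.hardware lines above walk the CPU topology search for the one-vCPU m1.nano flavor: with no flavor or image limits set, the maxima default to 65536 sockets/cores/threads, every sockets*cores*threads factorisation of the vCPU count is enumerated, and the candidates are sorted against the (empty) preference, leaving only 1:1:1. The sketch below condenses that idea; it follows the shape of the logged steps, not the exact code in nova/virt/hardware.py.

```python
# Condensed topology search: enumerate factorisations of the vCPU count
# that fit the maxima, then sort so topologies matching any non-zero
# preference come first. Illustrative, not Nova's implementation.
import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
        if (s * c * t == vcpus and s <= max_sockets
                and c <= max_cores and t <= max_threads):
            yield (s, c, t)

def sorted_topologies(vcpus, preferred=(0, 0, 0), **maxima):
    def mismatches(topo):
        # Lower is better: count disagreements with non-zero preferences.
        return sum(1 for want, got in zip(preferred, topo)
                   if want and want != got)
    return sorted(possible_topologies(vcpus, **maxima), key=mismatches)

print(sorted_topologies(1))  # -> [(1, 1, 1)], matching the log above
```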
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 861.436232] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 861.436440] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-926fc5a0-d641-4468-bf63-caf49df8579e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.450611] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cf50f8e-e645-402c-93f7-630c04e635bd tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "94d2670f-d858-437a-a166-d148a57e07ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.159s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.452566] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 861.452566] env[62923]: value = "task-1369999" [ 861.452566] env[62923]: _type = "Task" [ 861.452566] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.459934] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369999, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.509083] env[62923]: INFO nova.compute.manager [-] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Took 1.49 seconds to deallocate network for instance. [ 861.541317] env[62923]: DEBUG nova.scheduler.client.report [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 86 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 861.541636] env[62923]: DEBUG nova.compute.provider_tree [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 86 to 87 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 861.541768] env[62923]: DEBUG nova.compute.provider_tree [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 861.690772] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1369997, 'name': ReconfigVM_Task, 'duration_secs': 0.850438} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.691091] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 67a83e64-c8bd-499c-895a-11976d69195b/67a83e64-c8bd-499c-895a-11976d69195b.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 861.691806] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4812edfa-bd2f-46d6-a9b2-0da16ec1e5b7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.698611] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 861.698611] env[62923]: value = "task-1370000" [ 861.698611] env[62923]: _type = "Task" [ 861.698611] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.709638] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370000, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.787938] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369998, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.807772] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Releasing lock "refresh_cache-43065826-0f2b-48dc-bc42-8e0fd84fdcd3" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.808111] env[62923]: DEBUG nova.compute.manager [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Instance network_info: |[{"id": "4ba4e87a-6f39-4b74-87b4-12b093d28f4a", "address": "fa:16:3e:9a:a7:c3", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ba4e87a-6f", "ovs_interfaceid": "4ba4e87a-6f39-4b74-87b4-12b093d28f4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 861.808761] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:a7:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a78d5760-0bb1-4476-9578-8ad3c3144439', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ba4e87a-6f39-4b74-87b4-12b093d28f4a', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 861.816190] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Creating folder: Project (76d290a91b3b4d9491f755fd3d7e7894). Parent ref: group-v291405. 
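The pair of entries above shows the translation the driver performs between Neutron's network_info (the big JSON blob) and the much smaller "Instance VIF info" handed to build_virtual_machine: for an NSX-backed port the nsx-logical-switch-id from the VIF details becomes an OpaqueNetwork reference matched by external id, and the port id and a vmxnet3 model are carried along. A simplified mapping with the same output shape is sketched below; it is an illustration inferred from the two log lines, not the actual nova.virt.vmwareapi.vif code.

```python
# Simplified network_info -> VIF-info mapping matching the shapes logged
# above. The OpaqueNetwork branch mirrors the NSX case; everything else
# is out of scope for this sketch.
def vif_info_from_network_info(vif, vif_model="vmxnet3"):
    details = vif.get("details", {})
    switch_id = details.get("nsx-logical-switch-id")
    network_ref = None
    if switch_id:
        # NSX logical switch, matched by its external id.
        network_ref = {"type": "OpaqueNetwork",
                       "network-id": switch_id,
                       "network-type": "nsx.LogicalSwitch",
                       "use-external-id": True}
    return {"network_name": vif["network"]["bridge"],
            "mac_address": vif["address"],
            "network_ref": network_ref,
            "iface_id": vif["id"],
            "vif_model": vif_model}
```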
{{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.817031] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1094c02-84f7-4f68-bdc4-fb835c46da94 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.827988] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Created folder: Project (76d290a91b3b4d9491f755fd3d7e7894) in parent group-v291405. [ 861.828199] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Creating folder: Instances. Parent ref: group-v291457. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 861.828441] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8bf6bc3e-f9b5-48f9-a938-d2c9b04ebd4b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.839057] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Created folder: Instances in parent group-v291457. [ 861.839458] env[62923]: DEBUG oslo.service.loopingcall [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 861.839751] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 861.840092] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8b94602-928d-4bfc-a15d-632c891293fa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.875661] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Creating linked-clone VM from snapshot {{(pid=62923) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 861.876070] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e216a8e7-5f05-4a67-87d7-0941cce6c373 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.886803] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 861.886803] env[62923]: value = "task-1370003" [ 861.886803] env[62923]: _type = "Task" [ 861.886803] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.888694] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 861.888694] env[62923]: value = "task-1370004" [ 861.888694] env[62923]: _type = "Task" [ 861.888694] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.905714] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370004, 'name': CreateVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.910487] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370003, 'name': CloneVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.965028] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1369999, 'name': CreateVM_Task, 'duration_secs': 0.449106} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.965028] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 861.965028] env[62923]: DEBUG oslo_concurrency.lockutils [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.965205] env[62923]: DEBUG oslo_concurrency.lockutils [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.965554] env[62923]: DEBUG oslo_concurrency.lockutils [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 861.965838] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da0a6bf6-c5b1-437e-b173-4294439c9c8e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.972051] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 861.972051] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525028ff-7533-cee4-434d-1459d97f0715" [ 861.972051] env[62923]: _type = "Task" [ 861.972051] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.979767] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525028ff-7533-cee4-434d-1459d97f0715, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.013887] env[62923]: DEBUG nova.compute.manager [req-9cdfd6cb-c742-4dd2-8465-5eaa67812070 req-fcb228cd-96bc-4ad3-ad77-4634bbc29a10 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Received event network-changed-4ba4e87a-6f39-4b74-87b4-12b093d28f4a {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 862.014200] env[62923]: DEBUG nova.compute.manager [req-9cdfd6cb-c742-4dd2-8465-5eaa67812070 req-fcb228cd-96bc-4ad3-ad77-4634bbc29a10 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Refreshing instance network info cache due to event network-changed-4ba4e87a-6f39-4b74-87b4-12b093d28f4a. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 862.014294] env[62923]: DEBUG oslo_concurrency.lockutils [req-9cdfd6cb-c742-4dd2-8465-5eaa67812070 req-fcb228cd-96bc-4ad3-ad77-4634bbc29a10 service nova] Acquiring lock "refresh_cache-43065826-0f2b-48dc-bc42-8e0fd84fdcd3" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.014464] env[62923]: DEBUG oslo_concurrency.lockutils [req-9cdfd6cb-c742-4dd2-8465-5eaa67812070 req-fcb228cd-96bc-4ad3-ad77-4634bbc29a10 service nova] Acquired lock "refresh_cache-43065826-0f2b-48dc-bc42-8e0fd84fdcd3" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.014578] env[62923]: DEBUG nova.network.neutron [req-9cdfd6cb-c742-4dd2-8465-5eaa67812070 req-fcb228cd-96bc-4ad3-ad77-4634bbc29a10 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Refreshing network info cache for port 4ba4e87a-6f39-4b74-87b4-12b093d28f4a {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 862.017552] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.047576] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.388s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.048159] env[62923]: DEBUG nova.compute.manager [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Start building networks asynchronously for instance. 
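The network-changed event at [ 862.013887] triggers a rebuild of the instance network info cache under the same "refresh_cache-<instance>" lock the spawn path acquired earlier, which is what keeps a concurrent spawn and an external event handler from interleaving stale cache writes. A minimal sketch of that pattern with oslo.concurrency follows; fetch_nw_info and the module-level cache dict are assumed stand-ins for the Neutron round trip and Nova's instance info cache.

```python
# Lock-guarded cache refresh on an external "network-changed" event.
# fetch_nw_info is an assumed stub for the real Neutron query.
from oslo_concurrency import lockutils

_nw_info_cache = {}

def fetch_nw_info(instance_uuid, port_id):
    # Placeholder for the Neutron round trip the real handler performs.
    return [{"id": port_id, "active": True}]

def handle_network_changed(instance_uuid, port_id):
    # Serialise against spawn/attach paths that also rebuild the cache,
    # so concurrent writers cannot interleave stale data.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        _nw_info_cache[instance_uuid] = fetch_nw_info(instance_uuid, port_id)
```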
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 862.050833] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.101s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.052615] env[62923]: INFO nova.compute.claims [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.208466] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370000, 'name': Rename_Task, 'duration_secs': 0.16854} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.208752] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 862.209054] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8fcf425-659c-4c6c-b468-07278e0011aa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.215566] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 862.215566] env[62923]: value = "task-1370005" [ 862.215566] env[62923]: _type = "Task" [ 862.215566] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.223190] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370005, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.289397] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369998, 'name': CloneVM_Task} progress is 95%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.400419] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370003, 'name': CloneVM_Task} progress is 94%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.405282] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370004, 'name': CreateVM_Task, 'duration_secs': 0.379474} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.405445] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 862.406131] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.406291] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.406593] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 862.406836] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c816f379-c9a6-4d3e-bbf9-a544933c7b3e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.410901] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 862.410901] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c6a07f-c807-7674-9330-6ad6d434262b" [ 862.410901] env[62923]: _type = "Task" [ 862.410901] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.418481] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c6a07f-c807-7674-9330-6ad6d434262b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.482818] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525028ff-7533-cee4-434d-1459d97f0715, 'name': SearchDatastore_Task, 'duration_secs': 0.010647} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.482981] env[62923]: DEBUG oslo_concurrency.lockutils [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.483231] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 862.483497] env[62923]: DEBUG oslo_concurrency.lockutils [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.483683] env[62923]: DEBUG oslo_concurrency.lockutils [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.483895] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 862.484202] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-096b3372-7eef-4603-82ef-698759c43eba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.491531] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 862.491711] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Folder [datastore1] devstack-image-cache_base created. 
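The lock/search/mkdir sequence around devstack-image-cache_base above is a fetch-if-missing image cache: each datastore-plus-image path gets its own lock so only the first request downloads the base VMDK while later requests wait and then reuse it. A schematic version of that pattern is below; image_exists and fetch_image are assumed stubs standing in for the SearchDatastore_Task probe and the Glance download.

```python
# Per-image locking around a "fetch if missing" datastore cache.
# image_exists/fetch_image are assumed callables, not real Nova APIs.
from oslo_concurrency import lockutils

def ensure_cached_image(datastore, image_id, image_exists, fetch_image):
    cache_path = f"[{datastore}] devstack-image-cache_base/{image_id}"
    with lockutils.lock(cache_path):
        if not image_exists(cache_path):   # the SearchDatastore_Task step
            fetch_image(cache_path)        # download once; others reuse it
    return cache_path
```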
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 862.492429] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5273bd10-dc05-4878-93d7-907a770e8809 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.499389] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 862.499389] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5210be9d-2835-6414-0f66-7d46d8df6362" [ 862.499389] env[62923]: _type = "Task" [ 862.499389] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.507629] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5210be9d-2835-6414-0f66-7d46d8df6362, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.557650] env[62923]: DEBUG nova.compute.utils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 862.561990] env[62923]: DEBUG nova.compute.manager [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 862.562183] env[62923]: DEBUG nova.network.neutron [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 862.651559] env[62923]: DEBUG nova.policy [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd2eb4fc8d9b47db8b8384731aff11b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '035c2eb849e3432e8cb52d31d69b895b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 862.726060] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370005, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.790248] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1369998, 'name': CloneVM_Task, 'duration_secs': 1.275666} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.790568] env[62923]: INFO nova.virt.vmwareapi.vmops [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Created linked-clone VM from snapshot [ 862.791396] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94f190d-ef06-4c17-80e8-54934f282c95 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.798774] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Uploading image 8e3ac536-06db-4c19-acbd-1e18ce1b6eba {{(pid=62923) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 862.824875] env[62923]: DEBUG oslo_vmware.rw_handles [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 862.824875] env[62923]: value = "vm-291455" [ 862.824875] env[62923]: _type = "VirtualMachine" [ 862.824875] env[62923]: }. {{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 862.825300] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-672d9258-f99f-4b53-b085-d9f5567a68b5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.835670] env[62923]: DEBUG oslo_vmware.rw_handles [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lease: (returnval){ [ 862.835670] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b2aacd-0798-77ff-203a-930fc73e2d1e" [ 862.835670] env[62923]: _type = "HttpNfcLease" [ 862.835670] env[62923]: } obtained for exporting VM: (result){ [ 862.835670] env[62923]: value = "vm-291455" [ 862.835670] env[62923]: _type = "VirtualMachine" [ 862.835670] env[62923]: }. {{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 862.835950] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the lease: (returnval){ [ 862.835950] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b2aacd-0798-77ff-203a-930fc73e2d1e" [ 862.835950] env[62923]: _type = "HttpNfcLease" [ 862.835950] env[62923]: } to be ready. 
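Uploading the linked clone as image 8e3ac536-06db-4c19-acbd-1e18ce1b6eba goes through an HttpNfcLease: ExportVm returns a lease handle, the caller polls it out of the "initializing" state, and once ready the lease exposes the URLs the disks are streamed from. The poll loop below is a schematic equivalent of the wait the log shows; get_lease_state is an assumed stub for the PropertyCollector reads that fetch the lease status.

```python
# Schematic HttpNfcLease readiness wait. get_lease_state is an assumed
# callable returning 'initializing', 'ready', or 'error'.
import time

def wait_for_lease_ready(get_lease_state, timeout=300, interval=1.0):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = get_lease_state()
        if state == "ready":
            return                        # disks may now be streamed
        if state == "error":
            raise RuntimeError("HttpNfcLease entered error state")
        time.sleep(interval)              # lease is still initializing
    raise TimeoutError("lease did not become ready in time")
```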
{{(pid=62923) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 862.842429] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 862.842429] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b2aacd-0798-77ff-203a-930fc73e2d1e" [ 862.842429] env[62923]: _type = "HttpNfcLease" [ 862.842429] env[62923]: } is initializing. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 862.900452] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370003, 'name': CloneVM_Task} progress is 95%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.921308] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c6a07f-c807-7674-9330-6ad6d434262b, 'name': SearchDatastore_Task, 'duration_secs': 0.008801} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.922086] env[62923]: DEBUG nova.network.neutron [req-9cdfd6cb-c742-4dd2-8465-5eaa67812070 req-fcb228cd-96bc-4ad3-ad77-4634bbc29a10 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Updated VIF entry in instance network info cache for port 4ba4e87a-6f39-4b74-87b4-12b093d28f4a. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 862.922499] env[62923]: DEBUG nova.network.neutron [req-9cdfd6cb-c742-4dd2-8465-5eaa67812070 req-fcb228cd-96bc-4ad3-ad77-4634bbc29a10 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Updating instance_info_cache with network_info: [{"id": "4ba4e87a-6f39-4b74-87b4-12b093d28f4a", "address": "fa:16:3e:9a:a7:c3", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ba4e87a-6f", "ovs_interfaceid": "4ba4e87a-6f39-4b74-87b4-12b093d28f4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.923791] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.924040] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 862.924281] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.924427] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.924604] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 862.925149] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ecbee3f5-6476-4a7a-b72b-e05c655a5445 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.935969] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 862.936204] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 862.937169] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21283b19-1e23-4ecf-8054-3f971dc58e3e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.942630] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 862.942630] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52766b83-1074-a7ae-b8c5-1577eddedf2b" [ 862.942630] env[62923]: _type = "Task" [ 862.942630] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.950751] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52766b83-1074-a7ae-b8c5-1577eddedf2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.010711] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5210be9d-2835-6414-0f66-7d46d8df6362, 'name': SearchDatastore_Task, 'duration_secs': 0.011063} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.011604] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df764f5b-60ea-4c07-986b-7a2693da9dc3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.017121] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 863.017121] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5255e57a-ac72-b13e-d673-ab9405808bb7" [ 863.017121] env[62923]: _type = "Task" [ 863.017121] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.025271] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5255e57a-ac72-b13e-d673-ab9405808bb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.062903] env[62923]: DEBUG nova.compute.manager [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 863.091135] env[62923]: DEBUG nova.network.neutron [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Successfully created port: ecac0071-434c-47b6-8739-8522443e6a35 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.227017] env[62923]: DEBUG oslo_vmware.api [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370005, 'name': PowerOnVM_Task, 'duration_secs': 0.542282} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.229476] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 863.229746] env[62923]: INFO nova.compute.manager [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Took 6.31 seconds to spawn the instance on the hypervisor. [ 863.229846] env[62923]: DEBUG nova.compute.manager [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 863.231493] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4660070-c48b-43ef-8201-b2814afe3521 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.309663] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4de965-68b8-41da-b97c-77d20f7cceaf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.316777] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fe4e2f-60b3-44fb-bd04-32f194cd8619 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.349464] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f38b3af-b18a-4674-9a49-4777c4428b45 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.355543] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 863.355543] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b2aacd-0798-77ff-203a-930fc73e2d1e" [ 863.355543] env[62923]: _type = "HttpNfcLease" [ 863.355543] env[62923]: } is ready. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 863.357635] env[62923]: DEBUG oslo_vmware.rw_handles [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 863.357635] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b2aacd-0798-77ff-203a-930fc73e2d1e" [ 863.357635] env[62923]: _type = "HttpNfcLease" [ 863.357635] env[62923]: }. 
{{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 863.358399] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d35aa8-afb7-4dce-b41e-eae4dd06b896 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.361673] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd538581-71e3-47f5-92c6-259c9ac3958e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.371123] env[62923]: DEBUG oslo_vmware.rw_handles [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5b5a9-ba82-7071-7450-50ab04033c7a/disk-0.vmdk from lease info. {{(pid=62923) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 863.371217] env[62923]: DEBUG oslo_vmware.rw_handles [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5b5a9-ba82-7071-7450-50ab04033c7a/disk-0.vmdk for reading. {{(pid=62923) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 863.379873] env[62923]: DEBUG nova.compute.provider_tree [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 863.439371] env[62923]: DEBUG oslo_concurrency.lockutils [req-9cdfd6cb-c742-4dd2-8465-5eaa67812070 req-fcb228cd-96bc-4ad3-ad77-4634bbc29a10 service nova] Releasing lock "refresh_cache-43065826-0f2b-48dc-bc42-8e0fd84fdcd3" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.439748] env[62923]: DEBUG nova.compute.manager [req-9cdfd6cb-c742-4dd2-8465-5eaa67812070 req-fcb228cd-96bc-4ad3-ad77-4634bbc29a10 service nova] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Received event network-vif-deleted-27bb1fc5-45d6-4beb-a8b8-7c3817de011c {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 863.452784] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370003, 'name': CloneVM_Task, 'duration_secs': 1.140386} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.456196] env[62923]: INFO nova.virt.vmwareapi.vmops [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Created linked-clone VM from snapshot [ 863.456463] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52766b83-1074-a7ae-b8c5-1577eddedf2b, 'name': SearchDatastore_Task, 'duration_secs': 0.018295} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.457138] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82676f6-a574-4693-bb3e-848115a037ff {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.459880] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef69f31f-9a73-41a2-9b8f-8c5bdf356413 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.466630] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Uploading image dba42fad-df57-4284-bd83-249531882aca {{(pid=62923) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 863.469240] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 863.469240] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]520c8f67-b55b-3ace-34da-de31a776d97e" [ 863.469240] env[62923]: _type = "Task" [ 863.469240] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.479141] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]520c8f67-b55b-3ace-34da-de31a776d97e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.481353] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-63a699f6-8551-40c0-a9b2-2e476cd33b96 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.493773] env[62923]: DEBUG oslo_vmware.rw_handles [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 863.493773] env[62923]: value = "vm-291460" [ 863.493773] env[62923]: _type = "VirtualMachine" [ 863.493773] env[62923]: }. 
{{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 863.494423] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e06262d5-232b-4e67-ba12-479a78d8ce47 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.504442] env[62923]: DEBUG oslo_vmware.rw_handles [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lease: (returnval){ [ 863.504442] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52abc742-4fa6-ece1-3a04-bd228d655260" [ 863.504442] env[62923]: _type = "HttpNfcLease" [ 863.504442] env[62923]: } obtained for exporting VM: (result){ [ 863.504442] env[62923]: value = "vm-291460" [ 863.504442] env[62923]: _type = "VirtualMachine" [ 863.504442] env[62923]: }. {{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 863.505536] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the lease: (returnval){ [ 863.505536] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52abc742-4fa6-ece1-3a04-bd228d655260" [ 863.505536] env[62923]: _type = "HttpNfcLease" [ 863.505536] env[62923]: } to be ready. {{(pid=62923) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 863.517148] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 863.517148] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52abc742-4fa6-ece1-3a04-bd228d655260" [ 863.517148] env[62923]: _type = "HttpNfcLease" [ 863.517148] env[62923]: } is initializing. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 863.525147] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5255e57a-ac72-b13e-d673-ab9405808bb7, 'name': SearchDatastore_Task, 'duration_secs': 0.029314} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.525397] env[62923]: DEBUG oslo_concurrency.lockutils [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.525639] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 92c59517-7e6f-45bd-8211-789a718d66d1/92c59517-7e6f-45bd-8211-789a718d66d1.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 863.525875] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62e27978-4d69-425a-a8db-7ae99c884911 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.533151] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 863.533151] env[62923]: value = "task-1370008" [ 863.533151] env[62923]: _type = "Task" [ 863.533151] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.540392] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370008, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.750525] env[62923]: INFO nova.compute.manager [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Took 27.63 seconds to build instance. 
[ 863.923359] env[62923]: DEBUG nova.scheduler.client.report [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 87 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 863.923880] env[62923]: DEBUG nova.compute.provider_tree [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 87 to 88 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 863.924362] env[62923]: DEBUG nova.compute.provider_tree [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 863.980857] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]520c8f67-b55b-3ace-34da-de31a776d97e, 'name': SearchDatastore_Task, 'duration_secs': 0.010738} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.981170] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.981470] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 43065826-0f2b-48dc-bc42-8e0fd84fdcd3/43065826-0f2b-48dc-bc42-8e0fd84fdcd3.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 863.981752] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3164f5fb-3aca-4b93-adf2-03e21a78cfbb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.989395] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 863.989395] env[62923]: value = "task-1370009" [ 863.989395] env[62923]: _type = "Task" [ 863.989395] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.000322] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370009, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.017026] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 864.017026] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52abc742-4fa6-ece1-3a04-bd228d655260" [ 864.017026] env[62923]: _type = "HttpNfcLease" [ 864.017026] env[62923]: } is ready. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 864.017026] env[62923]: DEBUG oslo_vmware.rw_handles [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 864.017026] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52abc742-4fa6-ece1-3a04-bd228d655260" [ 864.017026] env[62923]: _type = "HttpNfcLease" [ 864.017026] env[62923]: }. 
{{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 864.017639] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe64c520-13ae-4d2b-a972-025b59485909 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.026413] env[62923]: DEBUG oslo_vmware.rw_handles [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263da6e-5f16-da16-4352-b436185963eb/disk-0.vmdk from lease info. {{(pid=62923) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 864.026827] env[62923]: DEBUG oslo_vmware.rw_handles [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263da6e-5f16-da16-4352-b436185963eb/disk-0.vmdk for reading. {{(pid=62923) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 864.087186] env[62923]: DEBUG nova.compute.manager [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 864.100397] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370008, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50933} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.100775] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 92c59517-7e6f-45bd-8211-789a718d66d1/92c59517-7e6f-45bd-8211-789a718d66d1.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 864.101048] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 864.101370] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a6243fa7-7822-4095-b088-aaf8418d85d2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.109384] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 864.109384] env[62923]: value = "task-1370010" [ 864.109384] env[62923]: _type = "Task" [ 864.109384] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.114170] env[62923]: DEBUG nova.virt.hardware [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 864.114678] env[62923]: DEBUG nova.virt.hardware [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 864.115054] env[62923]: DEBUG nova.virt.hardware [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.115739] env[62923]: DEBUG nova.virt.hardware [None req-657d2b75-719d-46d8-aff3-b832edecfb6d 
tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 864.116029] env[62923]: DEBUG nova.virt.hardware [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.116420] env[62923]: DEBUG nova.virt.hardware [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 864.116719] env[62923]: DEBUG nova.virt.hardware [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 864.116893] env[62923]: DEBUG nova.virt.hardware [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 864.117084] env[62923]: DEBUG nova.virt.hardware [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 864.117257] env[62923]: DEBUG nova.virt.hardware [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 864.117437] env[62923]: DEBUG nova.virt.hardware [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 864.118374] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af8b630-609d-48b6-8c49-44a01082058c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.127688] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370010, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.130808] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d0fb94-67df-4033-82e5-3ea5a646269d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.138748] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-450cef3d-c646-44cf-a726-8be4c8c34583 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.252694] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9860efc-a269-41e0-bb76-9b104200ab60 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "67a83e64-c8bd-499c-895a-11976d69195b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.798s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.430547] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.380s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.431978] env[62923]: DEBUG nova.compute.manager [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 864.434475] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.388s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.434654] env[62923]: DEBUG nova.objects.instance [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lazy-loading 'resources' on Instance uuid 81cca322-c1a0-4fbd-8013-0e4a4694ecfd {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 864.500071] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370009, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.620406] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370010, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090299} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.620800] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 864.621633] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2edab10f-4013-4a1d-83c0-b177334f27e4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.642523] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 92c59517-7e6f-45bd-8211-789a718d66d1/92c59517-7e6f-45bd-8211-789a718d66d1.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 864.642687] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22003a21-addf-42d9-8e58-a237f758ce52 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.663702] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 864.663702] env[62923]: value = "task-1370011" [ 864.663702] env[62923]: _type = "Task" [ 864.663702] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.671809] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370011, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.701244] env[62923]: DEBUG nova.compute.manager [req-fe5d2bdf-f829-4c89-ad4b-2e05c5cf08de req-2ec34307-bfd4-417b-842f-674f5403df0e service nova] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Received event network-vif-plugged-ecac0071-434c-47b6-8739-8522443e6a35 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 864.701523] env[62923]: DEBUG oslo_concurrency.lockutils [req-fe5d2bdf-f829-4c89-ad4b-2e05c5cf08de req-2ec34307-bfd4-417b-842f-674f5403df0e service nova] Acquiring lock "d8bed052-7d83-471f-a18f-67c4c16a1b4a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.701777] env[62923]: DEBUG oslo_concurrency.lockutils [req-fe5d2bdf-f829-4c89-ad4b-2e05c5cf08de req-2ec34307-bfd4-417b-842f-674f5403df0e service nova] Lock "d8bed052-7d83-471f-a18f-67c4c16a1b4a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.702077] env[62923]: DEBUG oslo_concurrency.lockutils [req-fe5d2bdf-f829-4c89-ad4b-2e05c5cf08de req-2ec34307-bfd4-417b-842f-674f5403df0e service nova] Lock "d8bed052-7d83-471f-a18f-67c4c16a1b4a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.702395] env[62923]: DEBUG nova.compute.manager [req-fe5d2bdf-f829-4c89-ad4b-2e05c5cf08de req-2ec34307-bfd4-417b-842f-674f5403df0e service nova] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] No waiting events found dispatching network-vif-plugged-ecac0071-434c-47b6-8739-8522443e6a35 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 864.702568] env[62923]: WARNING nova.compute.manager [req-fe5d2bdf-f829-4c89-ad4b-2e05c5cf08de req-2ec34307-bfd4-417b-842f-674f5403df0e service nova] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Received unexpected event network-vif-plugged-ecac0071-434c-47b6-8739-8522443e6a35 for instance with vm_state building and task_state spawning. 
[ 864.725417] env[62923]: INFO nova.compute.manager [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Rebuilding instance [ 864.865381] env[62923]: DEBUG nova.network.neutron [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Successfully updated port: ecac0071-434c-47b6-8739-8522443e6a35 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 864.937576] env[62923]: DEBUG nova.compute.utils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 864.943700] env[62923]: DEBUG nova.compute.manager [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 864.943700] env[62923]: DEBUG nova.network.neutron [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 864.989414] env[62923]: DEBUG nova.policy [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68e62d519b19448c8cac7f1b2e55a087', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3b09245b63144e9bbcb2262aef33a21', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 865.004442] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370009, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599787} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.004442] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 43065826-0f2b-48dc-bc42-8e0fd84fdcd3/43065826-0f2b-48dc-bc42-8e0fd84fdcd3.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 865.004442] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.004585] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb40507f-f13e-47a7-a738-3371db24ad35 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.012683] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 865.012683] env[62923]: value = "task-1370012" [ 865.012683] env[62923]: _type = "Task" [ 865.012683] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.026038] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370012, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.175102] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370011, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.189733] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff8add3-605a-41fd-9721-e9592b6684fc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.198339] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c27b07d-b897-4337-b70a-44d2f8ef8536 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.235131] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b9bbc6-5318-4ef6-b049-c83d81e51f0f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.244883] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2bcccf-25fb-4d1b-9ba2-becac9378463 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.259988] env[62923]: DEBUG nova.compute.provider_tree [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 865.368139] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquiring lock "refresh_cache-d8bed052-7d83-471f-a18f-67c4c16a1b4a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.368313] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquired lock "refresh_cache-d8bed052-7d83-471f-a18f-67c4c16a1b4a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.368466] env[62923]: DEBUG nova.network.neutron [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.387771] env[62923]: DEBUG nova.network.neutron [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Successfully created port: 0eb9a757-0625-4e00-a9b0-55888eb57e7b {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 865.443671] env[62923]: DEBUG nova.compute.manager [None 
req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 865.526949] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370012, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126664} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.527505] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.528289] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af5c019-a294-47b2-bd2c-7272d7b4a3d7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.551802] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 43065826-0f2b-48dc-bc42-8e0fd84fdcd3/43065826-0f2b-48dc-bc42-8e0fd84fdcd3.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.552165] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81d76b1f-2544-4886-9cd0-d763110d78b6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.572513] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 865.572513] env[62923]: value = "task-1370013" [ 865.572513] env[62923]: _type = "Task" [ 865.572513] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.583695] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370013, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.674449] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370011, 'name': ReconfigVM_Task, 'duration_secs': 0.599778} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.674725] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 92c59517-7e6f-45bd-8211-789a718d66d1/92c59517-7e6f-45bd-8211-789a718d66d1.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 865.675488] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a8532eb-d22c-4f55-837f-b767e34dd36a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.682129] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 865.682129] env[62923]: value = "task-1370014" [ 865.682129] env[62923]: _type = "Task" [ 865.682129] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.691043] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370014, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.795308] env[62923]: DEBUG nova.scheduler.client.report [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 88 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 865.795599] env[62923]: DEBUG nova.compute.provider_tree [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 88 to 89 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 865.795835] env[62923]: DEBUG nova.compute.provider_tree [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 865.924859] env[62923]: DEBUG nova.network.neutron [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.084544] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370013, 'name': ReconfigVM_Task, 'duration_secs': 0.493214} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.084544] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 43065826-0f2b-48dc-bc42-8e0fd84fdcd3/43065826-0f2b-48dc-bc42-8e0fd84fdcd3.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.085361] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e902985-21ec-4b6e-bd29-cce73afe565b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.092778] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 866.092778] env[62923]: value = "task-1370015" [ 866.092778] env[62923]: _type = "Task" [ 866.092778] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.103181] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370015, 'name': Rename_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.119357] env[62923]: DEBUG nova.network.neutron [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Updating instance_info_cache with network_info: [{"id": "ecac0071-434c-47b6-8739-8522443e6a35", "address": "fa:16:3e:1f:09:76", "network": {"id": "a011b9fb-b28a-42f9-8dd0-595d56320977", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-319942915-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "035c2eb849e3432e8cb52d31d69b895b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecac0071-43", "ovs_interfaceid": "ecac0071-434c-47b6-8739-8522443e6a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.192750] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370014, 'name': Rename_Task, 'duration_secs': 0.224478} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.193137] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 866.193500] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c73ef322-3deb-4c4d-b901-ce7d3e396b53 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.201838] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 866.201838] env[62923]: value = "task-1370016" [ 866.201838] env[62923]: _type = "Task" [ 866.201838] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.210516] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370016, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.301578] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.867s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.304839] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.933s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.306759] env[62923]: INFO nova.compute.claims [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 866.322619] env[62923]: INFO nova.scheduler.client.report [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Deleted allocations for instance 81cca322-c1a0-4fbd-8013-0e4a4694ecfd [ 866.456583] env[62923]: DEBUG nova.compute.manager [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 866.603065] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370015, 'name': Rename_Task, 'duration_secs': 0.205294} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.603605] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 866.603605] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-47bf298b-c1e9-4e8e-b8a2-407210c6d0f1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.610718] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 866.610718] env[62923]: value = "task-1370017" [ 866.610718] env[62923]: _type = "Task" [ 866.610718] env[62923]: } to complete. 
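[annotation] The recurring "Waiting for the task: (returnval){...} to complete" / "progress is N%" / "completed successfully" triplets throughout this log are oslo.vmware's task-polling loop: wait_for_task re-reads the vCenter Task object at a fixed interval until it leaves the running states. A self-contained sketch of that pattern, standard library only (poll and its return shape are stand-ins, not the oslo.vmware API):

import time

def wait_for_task(poll, interval=0.5, timeout=120):
    # `poll` is a stand-in callable returning a dict like
    # {'state': 'running'|'success'|'error', 'progress': int};
    # the real code reads these fields from the Task managed object.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError('task failed: %s' % info.get('error'))
        # Matches the "progress is N%" DEBUG lines in the log.
        print('progress is %d%%' % info.get('progress', 0))
        time.sleep(interval)
    raise TimeoutError('task did not complete in %ss' % timeout)
[/annotation]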
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.619927] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370017, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.623385] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Releasing lock "refresh_cache-d8bed052-7d83-471f-a18f-67c4c16a1b4a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.623385] env[62923]: DEBUG nova.compute.manager [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Instance network_info: |[{"id": "ecac0071-434c-47b6-8739-8522443e6a35", "address": "fa:16:3e:1f:09:76", "network": {"id": "a011b9fb-b28a-42f9-8dd0-595d56320977", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-319942915-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "035c2eb849e3432e8cb52d31d69b895b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecac0071-43", "ovs_interfaceid": "ecac0071-434c-47b6-8739-8522443e6a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 866.623385] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:09:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ecac0071-434c-47b6-8739-8522443e6a35', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.630630] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Creating folder: Project (035c2eb849e3432e8cb52d31d69b895b). Parent ref: group-v291405. 
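[annotation] The network_info blobs above follow Nova's VIF model: a list of port dicts, each carrying an id, MAC address, devname, and a nested network with subnets and fixed IPs; the driver distills these into the "Instance VIF info" list also logged above. A short sketch pulling out those fields from one entry shaped like the logged JSON (summarize_vif is an illustrative name, not a Nova function):

def summarize_vif(vif):
    # Collect every fixed IP across all subnets of the port's network.
    fixed_ips = [ip['address']
                 for subnet in vif['network']['subnets']
                 for ip in subnet['ips']]
    return {'port_id': vif['id'], 'mac': vif['address'],
            'devname': vif['devname'], 'fixed_ips': fixed_ips}

vif = {
    'id': 'ecac0071-434c-47b6-8739-8522443e6a35',
    'address': 'fa:16:3e:1f:09:76',
    'devname': 'tapecac0071-43',
    'network': {'subnets': [{'ips': [{'address': '192.168.128.10'}]}]},
}
print(summarize_vif(vif))
# -> {'port_id': 'ecac0071-...', 'mac': 'fa:16:3e:1f:09:76',
#     'devname': 'tapecac0071-43', 'fixed_ips': ['192.168.128.10']}
[/annotation]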
{{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 866.631341] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac7f9aa6-c6c6-47c0-a102-775ebc348d93 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.641897] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Created folder: Project (035c2eb849e3432e8cb52d31d69b895b) in parent group-v291405. [ 866.642189] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Creating folder: Instances. Parent ref: group-v291461. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 866.642503] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa9a3054-afcf-46c4-ad22-ffdd500fcc97 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.651873] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Created folder: Instances in parent group-v291461. [ 866.652147] env[62923]: DEBUG oslo.service.loopingcall [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 866.652385] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 866.652629] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b736bcd-2555-4f16-b58d-49cf18b151d4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.672255] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.672255] env[62923]: value = "task-1370020" [ 866.672255] env[62923]: _type = "Task" [ 866.672255] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.681183] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370020, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.712572] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370016, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.731659] env[62923]: DEBUG nova.compute.manager [req-bd011ece-6ab6-47b7-a588-85bb82c439d9 req-5d1fe141-782c-4004-83c6-3078ad657779 service nova] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Received event network-changed-ecac0071-434c-47b6-8739-8522443e6a35 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.731906] env[62923]: DEBUG nova.compute.manager [req-bd011ece-6ab6-47b7-a588-85bb82c439d9 req-5d1fe141-782c-4004-83c6-3078ad657779 service nova] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Refreshing instance network info cache due to event network-changed-ecac0071-434c-47b6-8739-8522443e6a35. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 866.732191] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd011ece-6ab6-47b7-a588-85bb82c439d9 req-5d1fe141-782c-4004-83c6-3078ad657779 service nova] Acquiring lock "refresh_cache-d8bed052-7d83-471f-a18f-67c4c16a1b4a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.732288] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd011ece-6ab6-47b7-a588-85bb82c439d9 req-5d1fe141-782c-4004-83c6-3078ad657779 service nova] Acquired lock "refresh_cache-d8bed052-7d83-471f-a18f-67c4c16a1b4a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.732478] env[62923]: DEBUG nova.network.neutron [req-bd011ece-6ab6-47b7-a588-85bb82c439d9 req-5d1fe141-782c-4004-83c6-3078ad657779 service nova] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Refreshing network info cache for port ecac0071-434c-47b6-8739-8522443e6a35 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 866.832062] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ea5d4296-c2f7-4e4d-a42e-bf5392cb6856 tempest-SecurityGroupsTestJSON-724708089 tempest-SecurityGroupsTestJSON-724708089-project-member] Lock "81cca322-c1a0-4fbd-8013-0e4a4694ecfd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.144s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.123173] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370017, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.170618] env[62923]: DEBUG nova.network.neutron [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Successfully updated port: 0eb9a757-0625-4e00-a9b0-55888eb57e7b {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 867.183229] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370020, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.213028] env[62923]: DEBUG oslo_vmware.api [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370016, 'name': PowerOnVM_Task, 'duration_secs': 0.624574} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.213333] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 867.213539] env[62923]: DEBUG nova.compute.manager [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 867.214557] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3069132d-8dba-42b6-8ce9-f55d783cbc15 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.520024] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e844d5c0-5f33-454c-a0cf-579da92d4a14 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.527743] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6652d2-2e60-411d-8cb4-5764780f81e5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.560874] env[62923]: DEBUG nova.network.neutron [req-bd011ece-6ab6-47b7-a588-85bb82c439d9 req-5d1fe141-782c-4004-83c6-3078ad657779 service nova] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Updated VIF entry in instance network info cache for port ecac0071-434c-47b6-8739-8522443e6a35. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 867.561511] env[62923]: DEBUG nova.network.neutron [req-bd011ece-6ab6-47b7-a588-85bb82c439d9 req-5d1fe141-782c-4004-83c6-3078ad657779 service nova] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Updating instance_info_cache with network_info: [{"id": "ecac0071-434c-47b6-8739-8522443e6a35", "address": "fa:16:3e:1f:09:76", "network": {"id": "a011b9fb-b28a-42f9-8dd0-595d56320977", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-319942915-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "035c2eb849e3432e8cb52d31d69b895b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecac0071-43", "ovs_interfaceid": "ecac0071-434c-47b6-8739-8522443e6a35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.563481] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af171dee-056b-4a81-b968-aa5ce6a4e243 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.573904] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-777361d7-706e-40c3-bb2e-30dfddb22ccc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.591808] env[62923]: DEBUG nova.compute.provider_tree [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.621898] env[62923]: DEBUG oslo_vmware.api [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370017, 'name': PowerOnVM_Task, 'duration_secs': 0.714072} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.622350] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 867.622569] env[62923]: INFO nova.compute.manager [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Took 8.32 seconds to spawn the instance on the hypervisor. [ 867.622829] env[62923]: DEBUG nova.compute.manager [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 867.623713] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e512559-e3a6-4be6-8546-dd3e0c330b11 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.673417] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.673517] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.673695] env[62923]: DEBUG nova.network.neutron [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 867.686212] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370020, 'name': CreateVM_Task, 'duration_secs': 0.543957} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.687160] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 867.687682] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.687857] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.688311] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 867.688711] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b244d67a-11df-4abd-9fa8-ff0e06ba6842 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.694445] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Waiting for the task: (returnval){ [ 867.694445] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5268a51e-1489-e479-2b06-9159683ded5b" [ 867.694445] env[62923]: _type = "Task" [ 867.694445] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.703715] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5268a51e-1489-e479-2b06-9159683ded5b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.730950] env[62923]: DEBUG oslo_concurrency.lockutils [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.068311] env[62923]: DEBUG oslo_concurrency.lockutils [req-bd011ece-6ab6-47b7-a588-85bb82c439d9 req-5d1fe141-782c-4004-83c6-3078ad657779 service nova] Releasing lock "refresh_cache-d8bed052-7d83-471f-a18f-67c4c16a1b4a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.095653] env[62923]: DEBUG nova.scheduler.client.report [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 868.143939] env[62923]: INFO nova.compute.manager [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Took 30.02 seconds to build instance. [ 868.206932] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5268a51e-1489-e479-2b06-9159683ded5b, 'name': SearchDatastore_Task, 'duration_secs': 0.019784} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.207285] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.207903] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.207903] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.207903] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.208093] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.208383] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4210390-7e9e-48ff-ac4d-9a5eab30a6c6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.217651] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.217851] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.219179] env[62923]: DEBUG nova.network.neutron [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.220964] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1b6d140-9593-4b22-83e8-a98504456c27 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.226947] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Waiting for the task: (returnval){ [ 868.226947] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5240d404-da2c-7926-7f12-d0019d3ec49f" [ 868.226947] env[62923]: _type = "Task" [ 868.226947] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.235664] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5240d404-da2c-7926-7f12-d0019d3ec49f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.372653] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquiring lock "92c59517-7e6f-45bd-8211-789a718d66d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.373026] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Lock "92c59517-7e6f-45bd-8211-789a718d66d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.373338] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquiring lock "92c59517-7e6f-45bd-8211-789a718d66d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.373561] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Lock "92c59517-7e6f-45bd-8211-789a718d66d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.375060] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Lock "92c59517-7e6f-45bd-8211-789a718d66d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.378068] env[62923]: INFO nova.compute.manager [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Terminating instance [ 868.380146] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquiring lock "refresh_cache-92c59517-7e6f-45bd-8211-789a718d66d1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.380333] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquired lock "refresh_cache-92c59517-7e6f-45bd-8211-789a718d66d1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.380529] env[62923]: DEBUG nova.network.neutron [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 868.436214] env[62923]: DEBUG nova.network.neutron [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating instance_info_cache with network_info: [{"id": "0eb9a757-0625-4e00-a9b0-55888eb57e7b", "address": "fa:16:3e:57:cc:02", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb9a757-06", "ovs_interfaceid": "0eb9a757-0625-4e00-a9b0-55888eb57e7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.601023] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.297s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.601604] env[62923]: DEBUG nova.compute.manager [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 
tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 868.604319] env[62923]: DEBUG oslo_concurrency.lockutils [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.328s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.604543] env[62923]: DEBUG nova.objects.instance [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Lazy-loading 'resources' on Instance uuid 6cf594e3-e4a6-45f5-b8d2-06db1c200042 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 868.646506] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cfd07f61-5c98-43d2-9365-d00a31b81765 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.303s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.741279] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5240d404-da2c-7926-7f12-d0019d3ec49f, 'name': SearchDatastore_Task, 'duration_secs': 0.020259} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.741279] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-352bbed4-c097-426a-9236-859b35513be3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.746526] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Waiting for the task: (returnval){ [ 868.746526] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52144714-030c-1560-b163-1c39a81af8df" [ 868.746526] env[62923]: _type = "Task" [ 868.746526] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.760192] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52144714-030c-1560-b163-1c39a81af8df, 'name': SearchDatastore_Task} progress is 0%. 
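[annotation] The paired 'acquired by "..." :: waited Ns' and '"released" by "..." :: held Ns' records throughout this stretch come from oslo.concurrency's lockutils wrapper, which timestamps both the wait for and the hold of a named lock. A minimal re-creation of that accounting, standard library only (timed_lock is a sketch, not oslo's implementation):

import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name, owner):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    with lock:
        # Time spent blocked -> the "waited N.NNNs" figure.
        print('Lock "%s" acquired by "%s" :: waited %.3fs'
              % (name, owner, time.monotonic() - t0))
        t1 = time.monotonic()
        try:
            yield
        finally:
            # Time spent inside the critical section -> "held N.NNNs".
            print('Lock "%s" "released" by "%s" :: held %.3fs'
                  % (name, owner, time.monotonic() - t1))

# e.g. with timed_lock("compute_resources", "ResourceTracker.instance_claim"): ...
[/annotation]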
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.938922] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.939246] env[62923]: DEBUG nova.compute.manager [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Instance network_info: |[{"id": "0eb9a757-0625-4e00-a9b0-55888eb57e7b", "address": "fa:16:3e:57:cc:02", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb9a757-06", "ovs_interfaceid": "0eb9a757-0625-4e00-a9b0-55888eb57e7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 869.052912] env[62923]: DEBUG nova.network.neutron [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 869.108878] env[62923]: DEBUG nova.compute.utils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 869.112855] env[62923]: DEBUG nova.compute.manager [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 869.112855] env[62923]: DEBUG nova.network.neutron [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 869.197208] env[62923]: DEBUG nova.compute.manager [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Received event network-vif-plugged-0eb9a757-0625-4e00-a9b0-55888eb57e7b {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 869.197208] env[62923]: DEBUG oslo_concurrency.lockutils [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] Acquiring lock "066da19f-daf0-44e3-8ae0-89f0c970cb92-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.197208] env[62923]: DEBUG oslo_concurrency.lockutils [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] Lock "066da19f-daf0-44e3-8ae0-89f0c970cb92-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.197468] env[62923]: DEBUG oslo_concurrency.lockutils [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] Lock "066da19f-daf0-44e3-8ae0-89f0c970cb92-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.197547] env[62923]: DEBUG nova.compute.manager [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] No waiting events found dispatching network-vif-plugged-0eb9a757-0625-4e00-a9b0-55888eb57e7b {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 869.197730] env[62923]: WARNING nova.compute.manager [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Received unexpected event network-vif-plugged-0eb9a757-0625-4e00-a9b0-55888eb57e7b for instance with vm_state building and task_state spawning. [ 869.197977] env[62923]: DEBUG nova.compute.manager [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Received event network-changed-0eb9a757-0625-4e00-a9b0-55888eb57e7b {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 869.198052] env[62923]: DEBUG nova.compute.manager [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Refreshing instance network info cache due to event network-changed-0eb9a757-0625-4e00-a9b0-55888eb57e7b. 
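[annotation] The "Received event network-vif-plugged-..." / "No waiting events found dispatching ..." / WARNING-about-an-unexpected-event sequence above is Nova's external-event handshake: an event delivered by Neutron only counts as expected if a waiter was registered for that (event name, tag) pair beforehand; otherwise the manager logs the warning seen here and moves on. A toy version of that dispatch table, with invented names, assuming threading.Event as the waiter primitive:

import threading

class InstanceEvents:
    # Toy dispatcher mirroring the prepare/pop pattern visible in the log.
    def __init__(self):
        self._waiters = {}  # (instance_uuid, event_key) -> threading.Event

    def prepare(self, instance_uuid, event_key):
        # Registered *before* the operation that triggers the event.
        ev = threading.Event()
        self._waiters[(instance_uuid, event_key)] = ev
        return ev

    def pop(self, instance_uuid, event_key):
        ev = self._waiters.pop((instance_uuid, event_key), None)
        if ev is None:
            # Corresponds to "No waiting events found dispatching ..."
            # followed by the WARNING about an unexpected event.
            print('WARNING: unexpected event %s for %s'
                  % (event_key, instance_uuid))
        else:
            ev.set()  # Wakes the spawn path blocked on ev.wait().
[/annotation]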
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 869.198260] env[62923]: DEBUG oslo_concurrency.lockutils [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] Acquiring lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.198363] env[62923]: DEBUG oslo_concurrency.lockutils [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] Acquired lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.198489] env[62923]: DEBUG nova.network.neutron [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Refreshing network info cache for port 0eb9a757-0625-4e00-a9b0-55888eb57e7b {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 869.257117] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52144714-030c-1560-b163-1c39a81af8df, 'name': SearchDatastore_Task, 'duration_secs': 0.015148} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.259757] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.260116] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] d8bed052-7d83-471f-a18f-67c4c16a1b4a/d8bed052-7d83-471f-a18f-67c4c16a1b4a.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.261304] env[62923]: DEBUG nova.network.neutron [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.262361] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35999502-5aab-4e52-9834-ce438fe680f4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.268961] env[62923]: DEBUG nova.policy [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c37debff078b4389813658cbad297e65', 
'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0db41047d1004a1d9ca7f663178058da', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 869.274413] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Waiting for the task: (returnval){ [ 869.274413] env[62923]: value = "task-1370021" [ 869.274413] env[62923]: _type = "Task" [ 869.274413] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.284607] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370021, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.324139] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a35e10e-18eb-47b7-a075-0cacb81b4d09 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.332621] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b717f90-7945-4030-be25-95c0488dede3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.367773] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a55b40f-e22c-4140-b3ea-3472b12204e0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.376159] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c30e60e-b054-4a31-9973-a03db5661cc5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.391162] env[62923]: DEBUG nova.compute.provider_tree [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.617823] env[62923]: DEBUG nova.compute.manager [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 869.765729] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Releasing lock "refresh_cache-92c59517-7e6f-45bd-8211-789a718d66d1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.766161] env[62923]: DEBUG nova.compute.manager [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 869.766394] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 869.767405] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58cd06ca-2100-4335-ba97-9d83d477253a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.777317] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 869.780964] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04f4d38e-6b7d-4f2c-85b3-cc0ddfb25c2e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.789434] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370021, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.791246] env[62923]: DEBUG oslo_vmware.api [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 869.791246] env[62923]: value = "task-1370022" [ 869.791246] env[62923]: _type = "Task" [ 869.791246] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.804212] env[62923]: DEBUG oslo_vmware.api [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370022, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.895221] env[62923]: DEBUG nova.scheduler.client.report [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 870.113078] env[62923]: DEBUG nova.network.neutron [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Successfully created port: cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 870.263610] env[62923]: DEBUG nova.network.neutron [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updated VIF entry in instance network info cache for port 0eb9a757-0625-4e00-a9b0-55888eb57e7b. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 870.264226] env[62923]: DEBUG nova.network.neutron [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating instance_info_cache with network_info: [{"id": "0eb9a757-0625-4e00-a9b0-55888eb57e7b", "address": "fa:16:3e:57:cc:02", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb9a757-06", "ovs_interfaceid": "0eb9a757-0625-4e00-a9b0-55888eb57e7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.291431] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370021, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.614187} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.291870] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] d8bed052-7d83-471f-a18f-67c4c16a1b4a/d8bed052-7d83-471f-a18f-67c4c16a1b4a.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.292355] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.292749] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad5e840b-8c52-4d88-903d-ec86538760f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.306852] env[62923]: DEBUG oslo_vmware.api [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370022, 'name': PowerOffVM_Task, 'duration_secs': 0.231073} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.309202] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 870.309202] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 870.309202] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Waiting for the task: (returnval){ [ 870.309202] env[62923]: value = "task-1370023" [ 870.309202] env[62923]: _type = "Task" [ 870.309202] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.309202] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35c23130-e132-4856-b980-9570a815289b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.320191] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370023, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.336992] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 870.336992] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 870.336992] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Deleting the datastore file [datastore1] 92c59517-7e6f-45bd-8211-789a718d66d1 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.337315] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8585feb-bbab-459f-b996-ae2bafc5de6f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.344938] env[62923]: DEBUG oslo_vmware.api [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for the task: (returnval){ [ 870.344938] env[62923]: value = "task-1370025" [ 870.344938] env[62923]: _type = "Task" [ 870.344938] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.353865] env[62923]: DEBUG oslo_vmware.api [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370025, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.401304] env[62923]: DEBUG oslo_concurrency.lockutils [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.797s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.404048] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.048s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.405890] env[62923]: INFO nova.compute.claims [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 870.441351] env[62923]: INFO nova.scheduler.client.report [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Deleted allocations for instance 6cf594e3-e4a6-45f5-b8d2-06db1c200042 [ 870.630569] env[62923]: DEBUG nova.compute.manager [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 870.768425] env[62923]: DEBUG oslo_concurrency.lockutils [req-5fba9b59-059e-4fb8-92ed-dcbfe20f16d1 req-48337b12-ebc3-4349-8425-ab285756152b service nova] Releasing lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.824111] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370023, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.857905] env[62923]: DEBUG oslo_vmware.api [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Task: {'id': task-1370025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245957} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.858292] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 870.858555] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 870.858821] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 870.859105] env[62923]: INFO nova.compute.manager [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Took 1.09 seconds to destroy the instance on the hypervisor. [ 870.859447] env[62923]: DEBUG oslo.service.loopingcall [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 870.859735] env[62923]: DEBUG nova.compute.manager [-] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 870.859870] env[62923]: DEBUG nova.network.neutron [-] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 870.876301] env[62923]: DEBUG nova.network.neutron [-] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Instance cache missing network info.
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.951346] env[62923]: DEBUG oslo_concurrency.lockutils [None req-22e56730-38ec-4b2e-af82-bee182c4f9bd tempest-InstanceActionsV221TestJSON-583097574 tempest-InstanceActionsV221TestJSON-583097574-project-member] Lock "6cf594e3-e4a6-45f5-b8d2-06db1c200042" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.728s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.272822] env[62923]: DEBUG nova.compute.manager [req-71841906-4eb4-459d-82ec-14595f432309 req-62618d40-10e3-4b7a-be58-280d82e7f0a0 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Received event network-changed-4ba4e87a-6f39-4b74-87b4-12b093d28f4a {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 871.273045] env[62923]: DEBUG nova.compute.manager [req-71841906-4eb4-459d-82ec-14595f432309 req-62618d40-10e3-4b7a-be58-280d82e7f0a0 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Refreshing instance network info cache due to event network-changed-4ba4e87a-6f39-4b74-87b4-12b093d28f4a. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 871.273320] env[62923]: DEBUG oslo_concurrency.lockutils [req-71841906-4eb4-459d-82ec-14595f432309 req-62618d40-10e3-4b7a-be58-280d82e7f0a0 service nova] Acquiring lock "refresh_cache-43065826-0f2b-48dc-bc42-8e0fd84fdcd3" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.273469] env[62923]: DEBUG oslo_concurrency.lockutils [req-71841906-4eb4-459d-82ec-14595f432309 req-62618d40-10e3-4b7a-be58-280d82e7f0a0 service nova] Acquired lock "refresh_cache-43065826-0f2b-48dc-bc42-8e0fd84fdcd3" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.273631] env[62923]: DEBUG nova.network.neutron [req-71841906-4eb4-459d-82ec-14595f432309 req-62618d40-10e3-4b7a-be58-280d82e7f0a0 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Refreshing network info cache for port 4ba4e87a-6f39-4b74-87b4-12b093d28f4a {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 871.323961] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370023, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.848148} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.324297] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.325654] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b337e47-a3e8-467a-b3ed-c60b1082d3ad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.352074] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] d8bed052-7d83-471f-a18f-67c4c16a1b4a/d8bed052-7d83-471f-a18f-67c4c16a1b4a.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.352891] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1710b943-7769-4cfc-9f36-bc4b7cc2a488 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.375706] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Waiting for the task: (returnval){ [ 871.375706] env[62923]: value = "task-1370026" [ 871.375706] env[62923]: _type = "Task" [ 871.375706] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.379069] env[62923]: DEBUG nova.network.neutron [-] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.386052] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370026, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.639692] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca499293-c387-4a6a-9ee1-0e6bd4463ee6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.647973] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c4f79d-8928-4e5e-a8f4-cc45403679c0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.682608] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6793cb71-b638-44b6-8acd-7f357be6904f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.691625] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9cf5269-844d-49c4-ab7a-5f63e338a335 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.706854] env[62923]: DEBUG nova.compute.provider_tree [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.881754] env[62923]: INFO nova.compute.manager [-] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Took 1.02 seconds to deallocate network for instance. [ 871.889462] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370026, 'name': ReconfigVM_Task, 'duration_secs': 0.497399} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.893220] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Reconfigured VM instance instance-0000004a to attach disk [datastore2] d8bed052-7d83-471f-a18f-67c4c16a1b4a/d8bed052-7d83-471f-a18f-67c4c16a1b4a.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.898095] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbc8010c-6478-4ed4-9edf-721543d1e31a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.907833] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Waiting for the task: (returnval){ [ 871.907833] env[62923]: value = "task-1370027" [ 871.907833] env[62923]: _type = "Task" [ 871.907833] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.922390] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370027, 'name': Rename_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.069309] env[62923]: DEBUG nova.network.neutron [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Successfully updated port: cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 872.210227] env[62923]: DEBUG nova.scheduler.client.report [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 872.399947] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.415231] env[62923]: DEBUG nova.network.neutron [req-71841906-4eb4-459d-82ec-14595f432309 req-62618d40-10e3-4b7a-be58-280d82e7f0a0 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Updated VIF entry in instance network info cache for port 4ba4e87a-6f39-4b74-87b4-12b093d28f4a. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 872.415651] env[62923]: DEBUG nova.network.neutron [req-71841906-4eb4-459d-82ec-14595f432309 req-62618d40-10e3-4b7a-be58-280d82e7f0a0 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Updating instance_info_cache with network_info: [{"id": "4ba4e87a-6f39-4b74-87b4-12b093d28f4a", "address": "fa:16:3e:9a:a7:c3", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ba4e87a-6f", "ovs_interfaceid": "4ba4e87a-6f39-4b74-87b4-12b093d28f4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.426168] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370027, 'name': Rename_Task, 'duration_secs': 0.2068} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.426793] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.429109] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ce89971-8e4a-403c-9776-455d59860326 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.437085] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Waiting for the task: (returnval){ [ 872.437085] env[62923]: value = "task-1370028" [ 872.437085] env[62923]: _type = "Task" [ 872.437085] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.451448] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370028, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.580163] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.580163] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.580163] env[62923]: DEBUG nova.network.neutron [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 872.711257] env[62923]: DEBUG nova.virt.hardware [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 872.711437] env[62923]: DEBUG nova.virt.hardware [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 872.711598] env[62923]: DEBUG nova.virt.hardware [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 872.711785] env[62923]: DEBUG nova.virt.hardware [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 872.711933] env[62923]: DEBUG nova.virt.hardware [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 872.712099]
env[62923]: DEBUG nova.virt.hardware [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 872.712320] env[62923]: DEBUG nova.virt.hardware [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 872.712473] env[62923]: DEBUG nova.virt.hardware [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 872.712651] env[62923]: DEBUG nova.virt.hardware [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 872.712813] env[62923]: DEBUG nova.virt.hardware [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 872.712982] env[62923]: DEBUG nova.virt.hardware [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 872.715065] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb93462d-fea8-40d3-ae32-9976b8810da1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.719332] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.315s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.719830] env[62923]: DEBUG nova.compute.manager [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 872.725308] env[62923]: DEBUG nova.virt.hardware [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 872.725603] env[62923]: DEBUG nova.virt.hardware [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 872.725784] env[62923]: DEBUG nova.virt.hardware [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 872.725971] env[62923]: DEBUG nova.virt.hardware [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 872.726154] env[62923]: DEBUG nova.virt.hardware [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 872.726312] env[62923]: DEBUG nova.virt.hardware [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 872.726513] env[62923]: DEBUG nova.virt.hardware [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 872.726669] env[62923]: DEBUG nova.virt.hardware [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 872.726834] env[62923]: DEBUG
nova.virt.hardware [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 872.726996] env[62923]: DEBUG nova.virt.hardware [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 872.727185] env[62923]: DEBUG nova.virt.hardware [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 872.730022] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.338s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.733234] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0d5d95-7ee2-4c7d-b364-6ced01758542 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.740944] env[62923]: DEBUG nova.compute.manager [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 872.742775] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b99c028-f815-4703-889e-314dd7a848e2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.747224] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a5264e-9252-4cb0-91ff-dc4d8a7d2b19 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.751931] env[62923]: DEBUG oslo_vmware.rw_handles [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5b5a9-ba82-7071-7450-50ab04033c7a/disk-0.vmdk. 
{{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 872.754818] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986f46fc-31ff-4bf2-bf84-5d29e80b56d8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.759538] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe1d6c5-d845-4a7b-be35-6858b765228f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.774691] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:cc:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91712705-510f-41a0-a803-2ecd92b676e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0eb9a757-0625-4e00-a9b0-55888eb57e7b', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 872.782365] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Creating folder: Project (b3b09245b63144e9bbcb2262aef33a21). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 872.788847] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27076d59-13e4-48b0-abdf-c664c118fa30 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.789697] env[62923]: DEBUG oslo_vmware.rw_handles [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5b5a9-ba82-7071-7450-50ab04033c7a/disk-0.vmdk is in state: ready. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 872.789872] env[62923]: ERROR oslo_vmware.rw_handles [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5b5a9-ba82-7071-7450-50ab04033c7a/disk-0.vmdk due to incomplete transfer. [ 872.790647] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-577ae27e-e1e1-4845-bc4e-df76cadd4da5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.808440] env[62923]: DEBUG oslo_vmware.rw_handles [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e5b5a9-ba82-7071-7450-50ab04033c7a/disk-0.vmdk. 
{{(pid=62923) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 872.808563] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Uploaded image 8e3ac536-06db-4c19-acbd-1e18ce1b6eba to the Glance image server {{(pid=62923) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 872.810847] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Destroying the VM {{(pid=62923) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 872.811178] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Created folder: Project (b3b09245b63144e9bbcb2262aef33a21) in parent group-v291405. [ 872.811343] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Creating folder: Instances. Parent ref: group-v291464. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 872.811870] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-98f6f58e-5f66-46a1-81f1-49f4d72e2ce0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.813479] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-997220cc-0aa0-476e-aafc-646f7d1d09ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.821632] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 872.821632] env[62923]: value = "task-1370030" [ 872.821632] env[62923]: _type = "Task" [ 872.821632] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.828976] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Created folder: Instances in parent group-v291464. [ 872.829279] env[62923]: DEBUG oslo.service.loopingcall [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 872.829914] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 872.831830] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c221a156-f132-4c29-918d-1c5d2bb60938 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.848033] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370030, 'name': Destroy_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.853923] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 872.853923] env[62923]: value = "task-1370032" [ 872.853923] env[62923]: _type = "Task" [ 872.853923] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.862191] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370032, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.927641] env[62923]: DEBUG oslo_concurrency.lockutils [req-71841906-4eb4-459d-82ec-14595f432309 req-62618d40-10e3-4b7a-be58-280d82e7f0a0 service nova] Releasing lock "refresh_cache-43065826-0f2b-48dc-bc42-8e0fd84fdcd3" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.947993] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370028, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.040974] env[62923]: DEBUG oslo_vmware.rw_handles [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263da6e-5f16-da16-4352-b436185963eb/disk-0.vmdk. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 873.045281] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b67931-05b4-4047-ae89-c00e243dbae5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.051020] env[62923]: DEBUG oslo_vmware.rw_handles [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263da6e-5f16-da16-4352-b436185963eb/disk-0.vmdk is in state: ready. 
{{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 873.051020] env[62923]: ERROR oslo_vmware.rw_handles [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263da6e-5f16-da16-4352-b436185963eb/disk-0.vmdk due to incomplete transfer. [ 873.051020] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0c0c96c4-ce0e-40e8-b56a-078e91ac1513 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.059254] env[62923]: DEBUG oslo_vmware.rw_handles [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5263da6e-5f16-da16-4352-b436185963eb/disk-0.vmdk. {{(pid=62923) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 873.059254] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Uploaded image dba42fad-df57-4284-bd83-249531882aca to the Glance image server {{(pid=62923) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 873.060236] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Destroying the VM {{(pid=62923) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 873.060513] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6b6d5039-c8bd-4822-a478-38dd66e51bc5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.067374] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 873.067374] env[62923]: value = "task-1370033" [ 873.067374] env[62923]: _type = "Task" [ 873.067374] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.076837] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370033, 'name': Destroy_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.183345] env[62923]: DEBUG nova.network.neutron [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 873.240542] env[62923]: DEBUG nova.compute.utils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 873.248701] env[62923]: DEBUG nova.compute.manager [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 873.249014] env[62923]: DEBUG nova.network.neutron [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 873.293254] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 873.293254] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b52dda3-90d6-427d-9950-60e9acceb976 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.301878] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 873.301878] env[62923]: value = "task-1370034" [ 873.301878] env[62923]: _type = "Task" [ 873.301878] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.313328] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370034, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.331020] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370030, 'name': Destroy_Task, 'duration_secs': 0.491279} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.331450] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Destroyed the VM [ 873.331805] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Deleting Snapshot of the VM instance {{(pid=62923) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 873.332085] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f8d79cea-8ed2-4b41-b008-ba248be758d6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.338883] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 873.338883] env[62923]: value = "task-1370035" [ 873.338883] env[62923]: _type = "Task" [ 873.338883] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.347291] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370035, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.362429] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370032, 'name': CreateVM_Task} progress is 25%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.410221] env[62923]: DEBUG nova.policy [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '676a737149a9418498a55f83760df073', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d1cf5e642524949a8366bf54d00593e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 873.449485] env[62923]: DEBUG oslo_vmware.api [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370028, 'name': PowerOnVM_Task, 'duration_secs': 0.915035} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.449848] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.450115] env[62923]: INFO nova.compute.manager [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Took 9.36 seconds to spawn the instance on the hypervisor. [ 873.450521] env[62923]: DEBUG nova.compute.manager [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 873.451378] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ddae252-3ad5-474d-8d6a-6f676e590527 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.480209] env[62923]: DEBUG nova.compute.manager [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Received event network-vif-plugged-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 873.480209] env[62923]: DEBUG oslo_concurrency.lockutils [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] Acquiring lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.480209] env[62923]: DEBUG oslo_concurrency.lockutils [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] Lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.480209] env[62923]: DEBUG oslo_concurrency.lockutils [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] Lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.480209] env[62923]: DEBUG nova.compute.manager [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] No waiting events found dispatching network-vif-plugged-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 873.480209] env[62923]: WARNING nova.compute.manager [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Received 
unexpected event network-vif-plugged-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e for instance with vm_state building and task_state spawning. [ 873.480209] env[62923]: DEBUG nova.compute.manager [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Received event network-changed-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 873.480209] env[62923]: DEBUG nova.compute.manager [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Refreshing instance network info cache due to event network-changed-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 873.480658] env[62923]: DEBUG oslo_concurrency.lockutils [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] Acquiring lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.487700] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "a616c7f0-8c39-4c08-a1a4-1d89e158d3c5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.487922] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "a616c7f0-8c39-4c08-a1a4-1d89e158d3c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.488174] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "a616c7f0-8c39-4c08-a1a4-1d89e158d3c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.488362] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "a616c7f0-8c39-4c08-a1a4-1d89e158d3c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.488550] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "a616c7f0-8c39-4c08-a1a4-1d89e158d3c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.492114] env[62923]: INFO nova.compute.manager [None req-47ab7cba-009b-42b9-a292-90259e8f195d 
tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Terminating instance [ 873.492913] env[62923]: DEBUG nova.compute.manager [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 873.493170] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 873.494040] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1137e74-2732-470a-879e-966b4b277775 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.501867] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 873.502198] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-960caa09-fc74-4ad1-9599-74f8f57e1dfd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.577791] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370033, 'name': Destroy_Task, 'duration_secs': 0.440417} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.578458] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Destroyed the VM [ 873.578769] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Deleting Snapshot of the VM instance {{(pid=62923) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 873.578995] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f9befa7f-8343-4f03-a63c-1dd41bc02674 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.585936] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 873.585936] env[62923]: value = "task-1370037" [ 873.585936] env[62923]: _type = "Task" [ 873.585936] env[62923]: } to complete. 
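The interleaved "Waiting for the task" and "Task: {...} progress is N%" records above are produced by oslo.vmware's task polling helper. A minimal sketch of that calling pattern, assuming a reachable vCenter and a VM managed-object reference obtained elsewhere (the host name and credentials below are placeholders, not values from this log):

    from oslo_vmware import api

    def power_off(session, vm_ref):
        """Start PowerOffVM_Task and block until vCenter reports it done.

        wait_for_task() re-reads the task object every
        task_poll_interval seconds and logs each update, which is what
        emits the 'progress is N%' lines seen in this log.
        """
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task_ref)

    # Placeholder connection details; a real deployment reads these
    # from the [vmware] section of nova.conf.
    session = api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',
        api_retry_count=10,      # retries on transient SOAP faults
        task_poll_interval=0.5)  # seconds between progress polls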
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.594685] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370037, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.710664] env[62923]: DEBUG nova.network.neutron [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updating instance_info_cache with network_info: [{"id": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "address": "fa:16:3e:59:5e:e2", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcea0b4ee-b6", "ovs_interfaceid": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.752047] env[62923]: DEBUG nova.compute.manager [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 873.798030] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 873.798030] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance a616c7f0-8c39-4c08-a1a4-1d89e158d3c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 873.798030] env[62923]: WARNING nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 7c3edceb-cc58-4925-a97a-3204936c836d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 873.798030] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 880cce70-5a0c-40a6-91b5-73d074feab6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 873.798030] env[62923]: WARNING nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 92c59517-7e6f-45bd-8211-789a718d66d1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 873.798030] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 94d2670f-d858-437a-a166-d148a57e07ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 873.798030] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 67a83e64-c8bd-499c-895a-11976d69195b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 873.798030] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 43065826-0f2b-48dc-bc42-8e0fd84fdcd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 873.798574] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance d8bed052-7d83-471f-a18f-67c4c16a1b4a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 873.798574] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 066da19f-daf0-44e3-8ae0-89f0c970cb92 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 873.798574] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 873.798574] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 534fa654-ed73-4518-bdc7-d1f981628fd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 873.814438] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370034, 'name': PowerOffVM_Task, 'duration_secs': 0.215798} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.814814] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 873.815134] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 873.816803] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3609b54-81c9-4e30-8e88-96c550cbc274 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.825794] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 873.826214] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f9bac55-2944-4c1c-9b0d-f57905109c05 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.850118] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370035, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.863510] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370032, 'name': CreateVM_Task} progress is 25%. 
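The recurring "Acquiring lock ... by ...", "acquired ... waited N.NNNs", and ""released" ... held N.NNNs" triplets throughout this log are emitted by oslo.concurrency's lock helpers. A minimal sketch of both forms, with a placeholder lock name (the real names above embed instance UUIDs and cache keys):

    from oslo_concurrency import lockutils

    # Decorator form: serializes all calls to the function under the
    # named lock and logs the acquire/release pair shown above.
    @lockutils.synchronized('refresh_cache-example')
    def refresh_cache():
        pass  # critical section: read or update the shared cache

    # Context-manager form, equivalent for an inline critical section.
    def update_cache():
        with lockutils.lock('refresh_cache-example'):
            pass  # critical section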
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.973771] env[62923]: INFO nova.compute.manager [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Took 34.81 seconds to build instance. [ 874.098217] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370037, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.191557] env[62923]: DEBUG nova.network.neutron [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Successfully created port: e5f5c80e-b51d-4788-a346-d4ff5982fa57 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 874.217431] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.217776] env[62923]: DEBUG nova.compute.manager [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Instance network_info: |[{"id": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "address": "fa:16:3e:59:5e:e2", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcea0b4ee-b6", "ovs_interfaceid": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 874.218466] env[62923]: DEBUG oslo_concurrency.lockutils [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] Acquired lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.218716] env[62923]: DEBUG nova.network.neutron [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] 
[instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Refreshing network info cache for port cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 874.219971] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:5e:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd7d0d95-6848-4e69-ac21-75f8db82a3b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 874.233637] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Creating folder: Project (0db41047d1004a1d9ca7f663178058da). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 874.235084] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54284cba-9d72-474b-b2c6-0bcae9f143f2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.249509] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Created folder: Project (0db41047d1004a1d9ca7f663178058da) in parent group-v291405. [ 874.249718] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Creating folder: Instances. Parent ref: group-v291467. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 874.249970] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78e4e1ba-7dc2-491a-95bc-5d07113a737a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.264573] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Created folder: Instances in parent group-v291467. [ 874.264880] env[62923]: DEBUG oslo.service.loopingcall [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
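The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" record above is logged from oslo.service's loopingcall module, which appears here in its RetryDecorator role: a wrapped callable is re-invoked, with growing sleeps, whenever it raises one of the listed exception types. A minimal sketch under that assumption (the exception class and retry limits are illustrative, not taken from this log):

    from oslo_service import loopingcall

    class TransientBackendError(Exception):
        """Placeholder for a fault type that is worth retrying."""

    @loopingcall.RetryDecorator(max_retry_count=3,
                                inc_sleep_time=1,
                                max_sleep_time=5,
                                exceptions=(TransientBackendError,))
    def create_vm():
        # Each TransientBackendError triggers another attempt, with the
        # sleep growing by inc_sleep_time up to max_sleep_time; any
        # other exception propagates immediately.
        pass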
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 874.265389] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 874.265623] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6735ab07-efe0-462a-b1fe-f4082cd7cc2c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.290884] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 874.290884] env[62923]: value = "task-1370041" [ 874.290884] env[62923]: _type = "Task" [ 874.290884] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.302133] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370041, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.302133] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance e6752138-5d66-469d-ac56-6bd169ad166e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.302133] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 874.302133] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 874.355089] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370035, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.367944] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370032, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.478640] env[62923]: DEBUG oslo_concurrency.lockutils [None req-657d2b75-719d-46d8-aff3-b832edecfb6d tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "d8bed052-7d83-471f-a18f-67c4c16a1b4a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.021s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.510177] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "60805eeb-8287-4064-9bd3-a7c6a21f40b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.510610] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "60805eeb-8287-4064-9bd3-a7c6a21f40b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.551017] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b52e38-0a79-4238-be48-99ec321dd556 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.559161] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41b3672-51d2-4d23-8319-4a3cc3134d62 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.604066] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235d1c8b-c63d-466f-9524-79d1c48ffde3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.606950] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.607362] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.607701] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Deleting the datastore file [datastore2] 67a83e64-c8bd-499c-895a-11976d69195b {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.609024] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-8429ec94-f88e-4fdb-8528-0a237ca5c7e3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.621150] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370037, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.622317] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b624c33-52f8-43e5-b2ea-742a693b1f8d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.627057] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 874.627057] env[62923]: value = "task-1370042" [ 874.627057] env[62923]: _type = "Task" [ 874.627057] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.640818] env[62923]: DEBUG nova.compute.provider_tree [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.649742] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370042, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.675734] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.675734] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.675734] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleting the datastore file [datastore1] a616c7f0-8c39-4c08-a1a4-1d89e158d3c5 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.675734] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2685c74e-4b5b-449f-a1c1-dad9d10c490a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.681978] env[62923]: DEBUG oslo_vmware.api [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 874.681978] env[62923]: value = "task-1370043" [ 874.681978] env[62923]: _type = "Task" [ 874.681978] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.695318] env[62923]: DEBUG oslo_vmware.api [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370043, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.768487] env[62923]: DEBUG nova.compute.manager [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 874.805349] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370041, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.807644] env[62923]: DEBUG nova.virt.hardware [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 874.807872] env[62923]: DEBUG nova.virt.hardware [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 874.808069] env[62923]: DEBUG nova.virt.hardware [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 874.808261] env[62923]: DEBUG nova.virt.hardware [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 874.808428] env[62923]: DEBUG nova.virt.hardware [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 874.808576] env[62923]: DEBUG nova.virt.hardware [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 874.808800] env[62923]: DEBUG nova.virt.hardware [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 874.808973] env[62923]: DEBUG nova.virt.hardware [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 874.809279] env[62923]: DEBUG 
nova.virt.hardware [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 874.809505] env[62923]: DEBUG nova.virt.hardware [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 874.809725] env[62923]: DEBUG nova.virt.hardware [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 874.810588] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020a4f3c-4e2f-419b-a192-5ca22bd7c793 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.818423] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d7ad15-af97-439f-933b-e3335e80e1ab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.850118] env[62923]: DEBUG oslo_vmware.api [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370035, 'name': RemoveSnapshot_Task, 'duration_secs': 1.271946} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.850453] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Deleted Snapshot of the VM instance {{(pid=62923) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 874.850746] env[62923]: INFO nova.compute.manager [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Took 15.67 seconds to snapshot the instance on the hypervisor. [ 874.865212] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370032, 'name': CreateVM_Task, 'duration_secs': 1.799751} completed successfully. 
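The nova.virt.hardware records above walk guest CPU topology selection: with no flavor or image limits set, a 1-vCPU flavor yields the single topology sockets=1, cores=1, threads=1. A toy enumeration of the same combinatorics (an illustration only, not nova's actual implementation):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product is vcpus."""
        for s in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % s:
                continue
            for c in range(1, min(vcpus // s, max_cores) + 1):
                if (vcpus // s) % c:
                    continue
                t = vcpus // (s * c)
                if t <= max_threads:
                    yield (s, c, t)

    # For the 1-vCPU m1.nano flavor logged above:
    print(list(possible_topologies(1)))  # [(1, 1, 1)]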
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.865476] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 874.866345] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.866459] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.867564] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 874.867564] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21f3a343-13b6-4688-931d-569d6d25726a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.872699] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 874.872699] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c8631c-1adf-0a7c-b1e1-52c0e97762a7" [ 874.872699] env[62923]: _type = "Task" [ 874.872699] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.881282] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c8631c-1adf-0a7c-b1e1-52c0e97762a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.014075] env[62923]: DEBUG nova.compute.manager [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 875.109729] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370037, 'name': RemoveSnapshot_Task, 'duration_secs': 1.066988} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.109999] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Deleted Snapshot of the VM instance {{(pid=62923) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 875.111037] env[62923]: DEBUG nova.compute.manager [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 875.112332] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0d0b51-57e8-4231-82aa-c11ae949728c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.141346] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370042, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122597} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.141776] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.141968] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.142313] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.145849] env[62923]: DEBUG nova.scheduler.client.report [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 875.194467] env[62923]: DEBUG oslo_vmware.api [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370043, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171566} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.195120] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.195340] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.195567] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.196237] env[62923]: INFO nova.compute.manager [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Took 1.70 seconds to destroy the instance on the hypervisor. [ 875.196746] env[62923]: DEBUG oslo.service.loopingcall [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 875.196945] env[62923]: DEBUG nova.compute.manager [-] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 875.197606] env[62923]: DEBUG nova.network.neutron [-] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 875.244182] env[62923]: DEBUG nova.network.neutron [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updated VIF entry in instance network info cache for port cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 875.244576] env[62923]: DEBUG nova.network.neutron [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updating instance_info_cache with network_info: [{"id": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "address": "fa:16:3e:59:5e:e2", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcea0b4ee-b6", "ovs_interfaceid": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.302496] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370041, 'name': CreateVM_Task, 'duration_secs': 0.663412} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.302709] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 875.303357] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.355183] env[62923]: DEBUG nova.compute.manager [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Instance disappeared during snapshot {{(pid=62923) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 875.368959] env[62923]: DEBUG nova.compute.manager [None req-2a5a62ed-0de9-4920-a979-172193aec6b7 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Image not found during clean up 8e3ac536-06db-4c19-acbd-1e18ce1b6eba {{(pid=62923) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4500}} [ 875.399460] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c8631c-1adf-0a7c-b1e1-52c0e97762a7, 'name': SearchDatastore_Task, 'duration_secs': 0.009571} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.399771] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.400029] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 875.400444] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.400958] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.400958] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 875.401921] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.402428] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 875.402713] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5398e652-484e-488e-93d4-9d9e95a4ea55 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.405783] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d1729df-a8ed-4108-b8be-e30f96e97386 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.413645] env[62923]: DEBUG oslo_vmware.api [None 
req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 875.413645] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ce6649-122a-43d1-d673-331b91fda436" [ 875.413645] env[62923]: _type = "Task" [ 875.413645] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.418967] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 875.419196] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 875.420255] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83023025-0ecf-47b5-9168-039f843fcb8f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.426743] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ce6649-122a-43d1-d673-331b91fda436, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.429412] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 875.429412] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52717db9-ac2c-19f7-0e8f-6582008610b0" [ 875.429412] env[62923]: _type = "Task" [ 875.429412] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.438186] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52717db9-ac2c-19f7-0e8f-6582008610b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.545459] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.630690] env[62923]: DEBUG nova.compute.manager [req-b78cbb72-cd87-4ca7-99b8-67ce29590cb6 req-fdea9d8a-b6d9-47e1-8324-43d86c0a2b70 service nova] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Received event network-vif-deleted-96266d56-2661-429a-aa45-a2015a285f2a {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 875.630902] env[62923]: INFO nova.compute.manager [req-b78cbb72-cd87-4ca7-99b8-67ce29590cb6 req-fdea9d8a-b6d9-47e1-8324-43d86c0a2b70 service nova] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Neutron deleted interface 96266d56-2661-429a-aa45-a2015a285f2a; detaching it from the instance and deleting it from the info cache [ 875.631108] env[62923]: DEBUG nova.network.neutron [req-b78cbb72-cd87-4ca7-99b8-67ce29590cb6 req-fdea9d8a-b6d9-47e1-8324-43d86c0a2b70 service nova] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.632333] env[62923]: INFO nova.compute.manager [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Shelve offloading [ 875.634716] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 875.635215] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-523df98f-cbdf-4c10-9c28-d28f02113649 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.643588] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 875.643588] env[62923]: value = "task-1370044" [ 875.643588] env[62923]: _type = "Task" [ 875.643588] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.655617] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 875.655839] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.926s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.656446] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370044, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.657382] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.268s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.658834] env[62923]: INFO nova.compute.claims [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.748257] env[62923]: DEBUG oslo_concurrency.lockutils [req-c6fe6936-e6e8-443e-8d49-a07c04a330c4 req-193066c4-30a7-4ce1-a404-038dec91d49d service nova] Releasing lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.924234] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ce6649-122a-43d1-d673-331b91fda436, 'name': SearchDatastore_Task, 'duration_secs': 0.01138} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.924619] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.924918] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 875.925194] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.938009] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52717db9-ac2c-19f7-0e8f-6582008610b0, 'name': SearchDatastore_Task, 'duration_secs': 0.01006} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.938935] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2821f3da-1eb9-4ca6-ab4f-c0d277cada5e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.944342] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 875.944342] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e47441-2e6b-16d1-9fda-fe096f382d0c" [ 875.944342] env[62923]: _type = "Task" [ 875.944342] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.952289] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e47441-2e6b-16d1-9fda-fe096f382d0c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.097561] env[62923]: DEBUG nova.network.neutron [-] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.136164] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-010030de-21b3-4d43-8e28-307c8eb19c8e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.151467] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b033e8d6-e7a9-4934-8011-34fdc13ba3af {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.181665] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] VM already powered off {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 876.182451] env[62923]: DEBUG nova.compute.manager [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 876.183376] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2804ee-ecb8-4e2b-822c-370108baa216 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.198922] env[62923]: DEBUG nova.compute.manager [req-b78cbb72-cd87-4ca7-99b8-67ce29590cb6 req-fdea9d8a-b6d9-47e1-8324-43d86c0a2b70 service nova] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Detach interface failed, port_id=96266d56-2661-429a-aa45-a2015a285f2a, reason: Instance a616c7f0-8c39-4c08-a1a4-1d89e158d3c5 could not be found. 
{{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 876.206351] env[62923]: DEBUG oslo_concurrency.lockutils [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "refresh_cache-880cce70-5a0c-40a6-91b5-73d074feab6f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.207200] env[62923]: DEBUG oslo_concurrency.lockutils [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "refresh_cache-880cce70-5a0c-40a6-91b5-73d074feab6f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.207200] env[62923]: DEBUG nova.network.neutron [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 876.209864] env[62923]: DEBUG nova.virt.hardware [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 876.210130] env[62923]: DEBUG nova.virt.hardware [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 876.210311] env[62923]: DEBUG nova.virt.hardware [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 876.210512] env[62923]: DEBUG nova.virt.hardware [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 876.210666] env[62923]: DEBUG nova.virt.hardware [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 876.211154] env[62923]: DEBUG nova.virt.hardware [None 
req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 876.211392] env[62923]: DEBUG nova.virt.hardware [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 876.211578] env[62923]: DEBUG nova.virt.hardware [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 876.211772] env[62923]: DEBUG nova.virt.hardware [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 876.211954] env[62923]: DEBUG nova.virt.hardware [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 876.212176] env[62923]: DEBUG nova.virt.hardware [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 876.213403] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153465ec-2d1a-4009-979f-bebfe59710d7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.222569] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6fa11c-4cb3-4353-bfcc-9af615159f15 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.240337] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Instance VIF info [] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 876.246762] env[62923]: DEBUG oslo.service.loopingcall [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 876.246971] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 876.247161] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64018a84-679a-404d-8026-34958bb9608d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.264630] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 876.264630] env[62923]: value = "task-1370045" [ 876.264630] env[62923]: _type = "Task" [ 876.264630] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.276052] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370045, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.455960] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e47441-2e6b-16d1-9fda-fe096f382d0c, 'name': SearchDatastore_Task, 'duration_secs': 0.015963} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.456355] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.456635] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 066da19f-daf0-44e3-8ae0-89f0c970cb92/066da19f-daf0-44e3-8ae0-89f0c970cb92.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 876.456844] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.457067] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 876.457271] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4ed8e3c-a019-4459-9bbc-915e0a910a73 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.461157] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa72b3d9-f0ea-4cd7-9b0e-7e9f912a4016 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.466343] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 876.466343] env[62923]: value = "task-1370046" [ 876.466343] env[62923]: _type = "Task" [ 876.466343] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.471127] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 876.471327] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 876.472513] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9748348f-ad90-4784-b917-0d661431accc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.478462] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370046, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.481562] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 876.481562] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52da5297-6f59-4f32-f7d3-a5f1bee451eb" [ 876.481562] env[62923]: _type = "Task" [ 876.481562] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.489632] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52da5297-6f59-4f32-f7d3-a5f1bee451eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.577332] env[62923]: DEBUG nova.network.neutron [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Successfully updated port: e5f5c80e-b51d-4788-a346-d4ff5982fa57 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 876.600706] env[62923]: INFO nova.compute.manager [-] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Took 1.40 seconds to deallocate network for instance. [ 876.661607] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.780428] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370045, 'name': CreateVM_Task, 'duration_secs': 0.452612} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.780550] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 876.780913] env[62923]: DEBUG oslo_concurrency.lockutils [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.781656] env[62923]: DEBUG oslo_concurrency.lockutils [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.781656] env[62923]: DEBUG oslo_concurrency.lockutils [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 876.781656] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-090060af-23bf-4e51-9461-b89e496b4e10 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.787255] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 876.787255] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522957a9-8a67-256a-be8c-58c08f5cfdfd" [ 876.787255] env[62923]: _type = "Task" [ 876.787255] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.799437] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522957a9-8a67-256a-be8c-58c08f5cfdfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.987317] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370046, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.004839] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52da5297-6f59-4f32-f7d3-a5f1bee451eb, 'name': SearchDatastore_Task, 'duration_secs': 0.012257} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.005763] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b8d0eb2-6254-42fd-b9ac-43161e3f4a87 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.013088] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb3692a-0c1f-429e-9f02-09ea34c8f84c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.016211] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 877.016211] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5294d800-74f1-ffcc-ca92-74d9b7bf14a1" [ 877.016211] env[62923]: _type = "Task" [ 877.016211] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.026576] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e4788e-acf6-444a-9c36-a038e2ded7c8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.033836] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5294d800-74f1-ffcc-ca92-74d9b7bf14a1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.060987] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9da4ca-7147-4d7f-a9a8-f0135c1b0cad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.068675] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde51065-fec0-4f2c-8e1f-865080ca9867 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.085835] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.085835] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.085835] env[62923]: DEBUG nova.network.neutron [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 877.086722] env[62923]: DEBUG nova.compute.provider_tree [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 877.108136] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.168174] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 877.196725] env[62923]: DEBUG nova.network.neutron [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Updating instance_info_cache with network_info: [{"id": "9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3", "address": 
"fa:16:3e:f0:eb:fd", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fbeeed0-1b", "ovs_interfaceid": "9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.304175] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522957a9-8a67-256a-be8c-58c08f5cfdfd, 'name': SearchDatastore_Task, 'duration_secs': 0.063662} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.304175] env[62923]: DEBUG oslo_concurrency.lockutils [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.304175] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 877.304368] env[62923]: DEBUG oslo_concurrency.lockutils [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.401142] env[62923]: DEBUG oslo_concurrency.lockutils [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquiring lock "d8bed052-7d83-471f-a18f-67c4c16a1b4a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.401201] env[62923]: DEBUG oslo_concurrency.lockutils [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 
tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "d8bed052-7d83-471f-a18f-67c4c16a1b4a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.405214] env[62923]: DEBUG oslo_concurrency.lockutils [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquiring lock "d8bed052-7d83-471f-a18f-67c4c16a1b4a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.405458] env[62923]: DEBUG oslo_concurrency.lockutils [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "d8bed052-7d83-471f-a18f-67c4c16a1b4a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.004s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.405633] env[62923]: DEBUG oslo_concurrency.lockutils [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "d8bed052-7d83-471f-a18f-67c4c16a1b4a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.409870] env[62923]: INFO nova.compute.manager [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Terminating instance [ 877.410817] env[62923]: DEBUG nova.compute.manager [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 877.411013] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 877.411849] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb5f7b6-d807-4441-b284-c2a7dcd3225c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.424142] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 877.424398] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da432eb6-9f14-4515-aff4-48064928ef4c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.430949] env[62923]: DEBUG oslo_vmware.api [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Waiting for the task: (returnval){ [ 877.430949] env[62923]: value = "task-1370047" [ 877.430949] env[62923]: _type = "Task" [ 877.430949] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.441019] env[62923]: DEBUG oslo_vmware.api [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370047, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.481847] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370046, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.596557} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.482181] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 066da19f-daf0-44e3-8ae0-89f0c970cb92/066da19f-daf0-44e3-8ae0-89f0c970cb92.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 877.482486] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 877.482753] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b39883ca-5355-4c38-b869-c52b73ffda6e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.490317] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 877.490317] env[62923]: value = "task-1370048" [ 877.490317] env[62923]: _type = "Task" [ 877.490317] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.499999] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370048, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.531652] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5294d800-74f1-ffcc-ca92-74d9b7bf14a1, 'name': SearchDatastore_Task, 'duration_secs': 0.027921} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.532611] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.532611] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7/8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 877.532701] env[62923]: DEBUG oslo_concurrency.lockutils [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.533071] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 877.533480] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e467f1a1-b6a2-42ed-8c07-fed0bd623c32 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.536385] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e09ff31-2de2-4444-979b-af16956a86ab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.546491] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 877.546491] env[62923]: value = "task-1370049" [ 877.546491] env[62923]: _type = "Task" [ 877.546491] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.548349] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 877.548816] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 877.552845] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a781343-541e-496f-9a4b-23b94a8c9d32 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.562614] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 877.562614] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ed5099-e338-dca7-c6ec-e79b91aa134a" [ 877.562614] env[62923]: _type = "Task" [ 877.562614] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.562614] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370049, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.572568] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ed5099-e338-dca7-c6ec-e79b91aa134a, 'name': SearchDatastore_Task, 'duration_secs': 0.009441} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.573736] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d0be544-0087-48d4-84ad-6dadd6d2fc9e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.579508] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 877.579508] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528f6292-759d-6b4c-dabc-9afbd83675ef" [ 877.579508] env[62923]: _type = "Task" [ 877.579508] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.597591] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528f6292-759d-6b4c-dabc-9afbd83675ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.627931] env[62923]: ERROR nova.scheduler.client.report [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [req-4d6088cf-ef73-4326-a5a0-eaaabba81c0e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4d6088cf-ef73-4326-a5a0-eaaabba81c0e"}]} [ 877.647383] env[62923]: DEBUG nova.network.neutron [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.653640] env[62923]: DEBUG nova.scheduler.client.report [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 877.663303] env[62923]: DEBUG nova.compute.manager [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Received event network-vif-plugged-e5f5c80e-b51d-4788-a346-d4ff5982fa57 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 877.663524] env[62923]: DEBUG oslo_concurrency.lockutils [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] Acquiring lock "534fa654-ed73-4518-bdc7-d1f981628fd8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.663821] env[62923]: DEBUG oslo_concurrency.lockutils [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] Lock "534fa654-ed73-4518-bdc7-d1f981628fd8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.663885] env[62923]: DEBUG oslo_concurrency.lockutils [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] Lock "534fa654-ed73-4518-bdc7-d1f981628fd8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.664275] env[62923]: DEBUG nova.compute.manager [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] [instance: 
534fa654-ed73-4518-bdc7-d1f981628fd8] No waiting events found dispatching network-vif-plugged-e5f5c80e-b51d-4788-a346-d4ff5982fa57 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 877.664518] env[62923]: WARNING nova.compute.manager [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Received unexpected event network-vif-plugged-e5f5c80e-b51d-4788-a346-d4ff5982fa57 for instance with vm_state building and task_state spawning. [ 877.664785] env[62923]: DEBUG nova.compute.manager [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Received event network-changed-e5f5c80e-b51d-4788-a346-d4ff5982fa57 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 877.664852] env[62923]: DEBUG nova.compute.manager [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Refreshing instance network info cache due to event network-changed-e5f5c80e-b51d-4788-a346-d4ff5982fa57. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 877.665020] env[62923]: DEBUG oslo_concurrency.lockutils [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] Acquiring lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.682205] env[62923]: DEBUG nova.scheduler.client.report [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 877.682205] env[62923]: DEBUG nova.compute.provider_tree [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 877.699654] env[62923]: DEBUG nova.scheduler.client.report [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 877.702256] env[62923]: 
DEBUG oslo_concurrency.lockutils [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "refresh_cache-880cce70-5a0c-40a6-91b5-73d074feab6f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.729021] env[62923]: DEBUG nova.scheduler.client.report [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 877.910204] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ae10c38f-7cd5-45cb-b4d4-b4addcf5d8c3 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.910204] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ae10c38f-7cd5-45cb-b4d4-b4addcf5d8c3 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.911661] env[62923]: DEBUG nova.compute.manager [None req-ae10c38f-7cd5-45cb-b4d4-b4addcf5d8c3 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 877.920150] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b16df1-e6dd-43f4-babd-07e325acb30e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.923095] env[62923]: DEBUG nova.network.neutron [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Updating instance_info_cache with network_info: [{"id": "e5f5c80e-b51d-4788-a346-d4ff5982fa57", "address": "fa:16:3e:c8:44:3f", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", 
"segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5f5c80e-b5", "ovs_interfaceid": "e5f5c80e-b51d-4788-a346-d4ff5982fa57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.929383] env[62923]: DEBUG nova.compute.manager [None req-ae10c38f-7cd5-45cb-b4d4-b4addcf5d8c3 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62923) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 877.930016] env[62923]: DEBUG nova.objects.instance [None req-ae10c38f-7cd5-45cb-b4d4-b4addcf5d8c3 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lazy-loading 'flavor' on Instance uuid 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 877.945876] env[62923]: DEBUG oslo_vmware.api [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370047, 'name': PowerOffVM_Task, 'duration_secs': 0.223658} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.945876] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 877.945876] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 877.945876] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23bb9194-2ce6-4759-a85d-ea942ef546e4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.009254] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071342} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.011393] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 878.011812] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 878.011812] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 878.011983] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Deleting the datastore file [datastore2] d8bed052-7d83-471f-a18f-67c4c16a1b4a {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 878.014182] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d33fff-7e10-4893-b6fd-fb45e76daa02 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.016875] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e04ce0-509a-4244-9fab-fc5b6d1a2a6b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.020179] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23a36fa8-3df3-4bbe-92c8-841aa07331bd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.028849] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c26856c-7214-44ce-958c-ce31ee956eab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.035755] env[62923]: DEBUG oslo_vmware.api [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Waiting for the task: (returnval){ [ 878.035755] env[62923]: value = "task-1370051" [ 878.035755] env[62923]: _type = "Task" [ 878.035755] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.054432] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 066da19f-daf0-44e3-8ae0-89f0c970cb92/066da19f-daf0-44e3-8ae0-89f0c970cb92.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 878.059054] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59270166-6b1b-467b-bdd2-6fe68c272284 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.111911] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20cba4ed-eb76-4a66-bb8a-e65bd0b7efad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.123211] env[62923]: DEBUG oslo_vmware.api [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370051, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.123211] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 878.123211] env[62923]: value = "task-1370052" [ 878.123211] env[62923]: _type = "Task" [ 878.123211] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.128972] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370049, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502134} completed successfully. 
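task-1370049 above is one of several CopyVirtualDisk_Task operations in this window that fan the same cached image (cd84cf13-...) out to per-instance directories (066da19f-..., 8489a1f6-..., and 67a83e64-... below). The datastore path shapes are copied from the log; the helper functions themselves are illustrative:

```python
# Path construction implied by the copy_virtual_disk entries above.
def cached_image_path(ds: str, image_id: str) -> str:
    return f"[{ds}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

def instance_root_path(ds: str, instance_uuid: str) -> str:
    return f"[{ds}] {instance_uuid}/{instance_uuid}.vmdk"

src = cached_image_path("datastore1", "cd84cf13-77b9-4bc1-bb15-31bece605a8e")
dst = instance_root_path("datastore1", "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7")
# CopyVirtualDisk_Task then copies src -> dst, as in task-1370049 above.
```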
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.135520] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7/8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 878.135833] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 878.136218] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528f6292-759d-6b4c-dabc-9afbd83675ef, 'name': SearchDatastore_Task, 'duration_secs': 0.009234} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.137771] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07877cd0-476f-41af-9920-1393993c1ece {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.140246] env[62923]: DEBUG oslo_concurrency.lockutils [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.140550] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 67a83e64-c8bd-499c-895a-11976d69195b/67a83e64-c8bd-499c-895a-11976d69195b.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 878.145599] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6327f0a4-322b-4173-a3ff-2a15b0dc4239 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.147846] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370052, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.149634] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b67150-b9bf-4148-8f4f-651095f83c9c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.155349] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 878.155544] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 878.155544] env[62923]: value = "task-1370053" [ 878.155544] env[62923]: _type = "Task" [ 878.155544] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.156912] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3e5c37-027f-417e-9f65-5a5ed9640559 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.161189] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 878.161189] env[62923]: value = "task-1370054" [ 878.161189] env[62923]: _type = "Task" [ 878.161189] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.174929] env[62923]: DEBUG nova.compute.provider_tree [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 878.179040] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 878.179854] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-256ce613-940a-4dfc-879e-8fdad66e8e73 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.186622] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370054, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.187282] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370053, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.256796] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 878.257051] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 878.257245] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleting the datastore file [datastore1] 880cce70-5a0c-40a6-91b5-73d074feab6f {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 878.257522] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7cd0950f-edd2-41a8-9bb3-571aa0646177 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.264736] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 878.264736] env[62923]: value = "task-1370056" [ 878.264736] env[62923]: _type = "Task" [ 878.264736] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.272148] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370056, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.427152] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.427152] env[62923]: DEBUG nova.compute.manager [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Instance network_info: |[{"id": "e5f5c80e-b51d-4788-a346-d4ff5982fa57", "address": "fa:16:3e:c8:44:3f", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5f5c80e-b5", "ovs_interfaceid": "e5f5c80e-b51d-4788-a346-d4ff5982fa57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 878.427354] env[62923]: DEBUG oslo_concurrency.lockutils [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] Acquired lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.427578] env[62923]: DEBUG nova.network.neutron [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Refreshing network info cache for port e5f5c80e-b51d-4788-a346-d4ff5982fa57 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 878.429032] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:44:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e41070eb-3ac1-4ca9-a3d0-fd65893a97de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e5f5c80e-b51d-4788-a346-d4ff5982fa57', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 878.440081] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 
tempest-ServerActionsTestOtherA-777156528-project-member] Creating folder: Project (2d1cf5e642524949a8366bf54d00593e). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 878.443008] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4672a17-f618-4c7d-9383-6c31091606a3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.446796] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae10c38f-7cd5-45cb-b4d4-b4addcf5d8c3 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.447761] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73956697-7493-4f62-893c-94f814593c82 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.457353] env[62923]: DEBUG oslo_vmware.api [None req-ae10c38f-7cd5-45cb-b4d4-b4addcf5d8c3 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 878.457353] env[62923]: value = "task-1370058" [ 878.457353] env[62923]: _type = "Task" [ 878.457353] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.459563] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Created folder: Project (2d1cf5e642524949a8366bf54d00593e) in parent group-v291405. [ 878.460253] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Creating folder: Instances. Parent ref: group-v291471. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 878.464427] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38fd7d8a-2027-405d-a99b-a785714eb9dc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.473296] env[62923]: DEBUG oslo_vmware.api [None req-ae10c38f-7cd5-45cb-b4d4-b4addcf5d8c3 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370058, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.476882] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Created folder: Instances in parent group-v291471. [ 878.477085] env[62923]: DEBUG oslo.service.loopingcall [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 878.478143] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 878.478143] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-152e1913-b88a-4b2f-b13c-22adfbbafc67 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.504108] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 878.504108] env[62923]: value = "task-1370060" [ 878.504108] env[62923]: _type = "Task" [ 878.504108] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.514819] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370060, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.563414] env[62923]: DEBUG oslo_vmware.api [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Task: {'id': task-1370051, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150396} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.563995] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 878.564350] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 878.564643] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 878.565155] env[62923]: INFO nova.compute.manager [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Took 1.15 seconds to destroy the instance on the hypervisor. [ 878.565607] env[62923]: DEBUG oslo.service.loopingcall [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
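The "Waiting for function ... _deallocate_network_with_retries to return" lines come from oslo.service's looping-call machinery (loopingcall.py in the trailer). A minimal sketch of that pattern, with a stand-in function instead of Nova's deallocate step: the body re-runs on the interval until it raises LoopingCallDone, and the caller blocks on .wait().

```python
from oslo_service import loopingcall

attempts = {'n': 0}

def _deallocate_with_retries():
    # Stand-in for the retried work; pretend the first two tries fail.
    attempts['n'] += 1
    if attempts['n'] < 3:
        return                      # loop runs again after the interval
    raise loopingcall.LoopingCallDone(retvalue='deallocated')

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
result = timer.start(interval=0.1).wait()   # blocks until LoopingCallDone
print(result)                               # 'deallocated'
```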
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 878.566009] env[62923]: DEBUG nova.compute.manager [-] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 878.566252] env[62923]: DEBUG nova.network.neutron [-] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 878.633518] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370052, 'name': ReconfigVM_Task, 'duration_secs': 0.280977} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.634063] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 066da19f-daf0-44e3-8ae0-89f0c970cb92/066da19f-daf0-44e3-8ae0-89f0c970cb92.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 878.634700] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39e4f0e4-7692-4b4d-8b2a-e139e33ce5aa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.641980] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 878.641980] env[62923]: value = "task-1370061" [ 878.641980] env[62923]: _type = "Task" [ 878.641980] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.652601] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370061, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.673953] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370053, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062792} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.678261] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 878.678748] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370054, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477485} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.679623] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ce4964-aa41-4c74-b48a-645074622316 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.685338] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 67a83e64-c8bd-499c-895a-11976d69195b/67a83e64-c8bd-499c-895a-11976d69195b.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 878.685338] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 878.686294] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc1a3a38-7537-4e7d-9088-4b5dd25204ca {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.720412] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7/8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 878.722748] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ceba75c-59bd-4842-9bad-4f7a203cb9b2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.740836] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 878.740836] env[62923]: value = "task-1370062" [ 878.740836] env[62923]: _type = "Task" [ 878.740836] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.748538] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 878.748538] env[62923]: value = "task-1370063" [ 878.748538] env[62923]: _type = "Task" [ 878.748538] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.749620] env[62923]: DEBUG nova.scheduler.client.report [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 90 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 878.750133] env[62923]: DEBUG nova.compute.provider_tree [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 90 to 91 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 878.750528] env[62923]: DEBUG nova.compute.provider_tree [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 878.764378] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370062, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.776957] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370063, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.784633] env[62923]: DEBUG oslo_vmware.api [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370056, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.445695} completed successfully. 
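The set_inventory_for_provider entries just above resolve the 409 "placement.concurrent_update" error logged at 877.627931: Placement inventory updates are optimistically concurrent, keyed on the resource provider generation, so after a conflict the report client refreshes its view and retries, here landing the update at generation 90 and bumping the provider to 91. A hedged sketch of that retry shape against the Placement HTTP API; the requests usage and token handling are illustrative, not Nova's report client:

```python
import requests

def put_inventory(base, uuid, inventories, token, retries=3):
    """Retry an inventory PUT across generation conflicts (HTTP 409)."""
    for _ in range(retries):
        gen = requests.get(
            f"{base}/resource_providers/{uuid}",
            headers={"x-auth-token": token},
        ).json()["generation"]
        resp = requests.put(
            f"{base}/resource_providers/{uuid}/inventories",
            json={"resource_provider_generation": gen,
                  "inventories": inventories},
            headers={"x-auth-token": token},
        )
        if resp.status_code != 409:       # conflict -> re-read generation
            resp.raise_for_status()
            return resp.json()            # carries the new generation
    raise RuntimeError("still conflicting after retries")
```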
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.785922] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 878.786261] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 878.786471] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 878.814527] env[62923]: INFO nova.scheduler.client.report [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleted allocations for instance 880cce70-5a0c-40a6-91b5-73d074feab6f [ 878.969740] env[62923]: DEBUG oslo_vmware.api [None req-ae10c38f-7cd5-45cb-b4d4-b4addcf5d8c3 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370058, 'name': PowerOffVM_Task, 'duration_secs': 0.321518} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.970526] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae10c38f-7cd5-45cb-b4d4-b4addcf5d8c3 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 878.970750] env[62923]: DEBUG nova.compute.manager [None req-ae10c38f-7cd5-45cb-b4d4-b4addcf5d8c3 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 878.971675] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1019844-1854-4aec-829a-6bea1ce50931 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.014566] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370060, 'name': CreateVM_Task, 'duration_secs': 0.430206} completed successfully. 
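The "Checking state" entries above (_get_power_state) reconcile vCenter's runtime power state with Nova's numeric power_state, which is why earlier records could report "current DB power_state: 1, current VM power_state: 1". The constants below follow nova.compute.power_state; the lookup itself is a sketch, not the driver's code:

```python
# Nova power_state constants (nova.compute.power_state).
NOSTATE, RUNNING, SHUTDOWN, SUSPENDED = 0, 1, 4, 7

# Illustrative mapping from vCenter runtime.powerState values.
VC_TO_NOVA = {
    'poweredOn': RUNNING,      # matches "current VM power_state: 1"
    'poweredOff': SHUTDOWN,
    'suspended': SUSPENDED,
}

def get_power_state(vc_state: str) -> int:
    return VC_TO_NOVA.get(vc_state, NOSTATE)
```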
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.017204] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 879.018202] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.018394] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.018752] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 879.019395] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7edba95a-c24e-4820-9fe9-08d1aa8ddda8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.025538] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 879.025538] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52145caf-b778-8379-25e7-eb7ed975ed56" [ 879.025538] env[62923]: _type = "Task" [ 879.025538] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.034443] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52145caf-b778-8379-25e7-eb7ed975ed56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.151305] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370061, 'name': Rename_Task, 'duration_secs': 0.147406} completed successfully. 
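The Acquiring lock / Acquired lock / Acquired external semaphore lines just above are oslo.concurrency serializing access to the cached vmdk while instances copy from it. A rough equivalent of that pattern, assuming a writable lock_path; the lock name is the datastore path copied from the log (lockutils flattens path separators when building the lock file name):

```python
from oslo_concurrency import lockutils

CACHE_VMDK = ("[datastore1] devstack-image-cache_base/"
              "cd84cf13-77b9-4bc1-bb15-31bece605a8e/"
              "cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk")

def copy_from_cache():
    # external=True adds a file lock on top of the in-process semaphore,
    # so concurrent workers (the tempest requests above) queue up here.
    with lockutils.lock(CACHE_VMDK, external=True, lock_path="/tmp"):
        pass  # CopyVirtualDisk_Task would run while the lock is held
```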
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.151640] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 879.152139] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a56b705-cf03-4b0d-9bcc-2c1c79df8ded {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.158376] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 879.158376] env[62923]: value = "task-1370064" [ 879.158376] env[62923]: _type = "Task" [ 879.158376] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.170668] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370064, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.253328] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370062, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071812} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.253602] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 879.254854] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fe4d0f-a6b4-4d1f-a452-5328fbc4b180 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.268927] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.611s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.269467] env[62923]: DEBUG nova.compute.manager [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 879.280580] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 67a83e64-c8bd-499c-895a-11976d69195b/67a83e64-c8bd-499c-895a-11976d69195b.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 879.284235] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.267s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.284468] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.286607] env[62923]: DEBUG oslo_concurrency.lockutils [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 11.556s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.286821] env[62923]: DEBUG nova.objects.instance [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62923) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 879.289378] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ae69923-f9aa-4e86-94cd-8c50e5ee16ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.305233] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370063, 'name': ReconfigVM_Task, 'duration_secs': 0.364404} completed successfully. 
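The "Reconfiguring VM instance ... to attach disk" / "Reconfigured VM instance" pairs here are ReconfigVM_Task calls that add a disk device backed by the instance's VMDK. Nova assembles the spec through vm_util/volumeops; a bare pyVmomi sketch of an equivalent spec is below, with hypothetical values, a simplified backing (the log's "type sparse" handling is more involved), and the controller key/unit number a real spec also needs omitted.

```python
from pyVmomi import vim

def attach_disk_spec(vmdk_path, capacity_kb):
    disk = vim.vm.device.VirtualDisk()
    disk.capacityInKB = capacity_kb
    backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
    backing.fileName = vmdk_path          # e.g. '[datastore1] <uuid>/<uuid>.vmdk'
    backing.diskMode = "persistent"
    disk.backing = backing

    dev_spec = vim.vm.device.VirtualDeviceSpec()
    dev_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
    dev_spec.device = disk

    spec = vim.vm.ConfigSpec()
    spec.deviceChange = [dev_spec]
    return spec                            # passed to vm.ReconfigVM_Task(spec=spec)
```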
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.307851] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7/8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 879.308876] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd50d941-6d4a-4469-af26-64df0b290326 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.314130] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 879.314130] env[62923]: value = "task-1370065" [ 879.314130] env[62923]: _type = "Task" [ 879.314130] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.320010] env[62923]: DEBUG oslo_concurrency.lockutils [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.320576] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 879.320576] env[62923]: value = "task-1370066" [ 879.320576] env[62923]: _type = "Task" [ 879.320576] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.329370] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370065, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.338244] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370066, 'name': Rename_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.348684] env[62923]: INFO nova.scheduler.client.report [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleted allocations for instance 7c3edceb-cc58-4925-a97a-3204936c836d [ 879.378036] env[62923]: DEBUG nova.network.neutron [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Updated VIF entry in instance network info cache for port e5f5c80e-b51d-4788-a346-d4ff5982fa57. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 879.378619] env[62923]: DEBUG nova.network.neutron [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Updating instance_info_cache with network_info: [{"id": "e5f5c80e-b51d-4788-a346-d4ff5982fa57", "address": "fa:16:3e:c8:44:3f", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5f5c80e-b5", "ovs_interfaceid": "e5f5c80e-b51d-4788-a346-d4ff5982fa57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.489054] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ae10c38f-7cd5-45cb-b4d4-b4addcf5d8c3 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.579s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.491072] env[62923]: DEBUG nova.network.neutron [-] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.541522] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52145caf-b778-8379-25e7-eb7ed975ed56, 'name': SearchDatastore_Task, 'duration_secs': 0.009518} completed successfully. 
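The instance_info_cache payload logged above is a plain list of VIF dicts (port id, MAC address, network with subnets and fixed IPs, binding details). As an illustration, a small helper (not Nova code) that pulls the fixed IPs out of such a structure:

```python
def fixed_ips(network_info):
    """Collect fixed IP addresses from a Nova-style network_info list."""
    ips = []
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            ips.extend(ip["address"] for ip in subnet["ips"]
                       if ip["type"] == "fixed")
    return ips

# For the cache entry logged above this returns ['192.168.128.5'].
```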
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.541841] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.542085] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 879.542328] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.542470] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.542833] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.543403] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4411c455-b417-4efd-9d63-c1346b90e056 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.558806] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.559054] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 879.560364] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7b2c8ad-4bae-46a2-9f3a-796f61e52e2a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.569016] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 879.569016] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]523fc25b-bbe8-5fa3-1ca4-14be28ac87d0" [ 879.569016] env[62923]: _type = "Task" [ 879.569016] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.576262] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]523fc25b-bbe8-5fa3-1ca4-14be28ac87d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.674026] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370064, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.765365] env[62923]: DEBUG nova.compute.manager [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Received event network-vif-unplugged-9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 879.765597] env[62923]: DEBUG oslo_concurrency.lockutils [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] Acquiring lock "880cce70-5a0c-40a6-91b5-73d074feab6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.765803] env[62923]: DEBUG oslo_concurrency.lockutils [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] Lock "880cce70-5a0c-40a6-91b5-73d074feab6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.765974] env[62923]: DEBUG oslo_concurrency.lockutils [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] Lock "880cce70-5a0c-40a6-91b5-73d074feab6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.766264] env[62923]: DEBUG nova.compute.manager [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] No waiting events found dispatching 
network-vif-unplugged-9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 879.766447] env[62923]: WARNING nova.compute.manager [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Received unexpected event network-vif-unplugged-9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3 for instance with vm_state shelved_offloaded and task_state None. [ 879.766614] env[62923]: DEBUG nova.compute.manager [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Received event network-changed-9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 879.766781] env[62923]: DEBUG nova.compute.manager [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Refreshing instance network info cache due to event network-changed-9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 879.766952] env[62923]: DEBUG oslo_concurrency.lockutils [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] Acquiring lock "refresh_cache-880cce70-5a0c-40a6-91b5-73d074feab6f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.767116] env[62923]: DEBUG oslo_concurrency.lockutils [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] Acquired lock "refresh_cache-880cce70-5a0c-40a6-91b5-73d074feab6f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.767301] env[62923]: DEBUG nova.network.neutron [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Refreshing network info cache for port 9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 879.786430] env[62923]: DEBUG nova.compute.utils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 879.788247] env[62923]: DEBUG nova.compute.manager [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 879.788443] env[62923]: DEBUG nova.network.neutron [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 879.828408] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370065, 'name': ReconfigVM_Task, 'duration_secs': 0.449122} completed successfully. 
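The "No waiting events found dispatching ..." and "Received unexpected event ... for instance with vm_state shelved_offloaded" lines above reflect the compute manager's event-waiter table: callers register interest in an (instance, event) pair and block on it, and incoming Neutron notifications either complete a registered waiter or are logged and dropped. A toy version of that pattern; the class and method names are illustrative, not Nova's actual internals.

```python
import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}          # (instance_uuid, event_name) -> Event
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, event_name):
        """Called before an operation that expects an external event."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        """Called when a Neutron notification arrives."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # matches the 'No waiting events found dispatching ...' entry
            print(f"No waiting events found dispatching {event_name}")
            return False
        ev.set()                     # unblock the waiting operation
        return True
```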
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.829147] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 67a83e64-c8bd-499c-895a-11976d69195b/67a83e64-c8bd-499c-895a-11976d69195b.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 879.829692] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a71aa43f-66cf-4374-8fb3-2dc7f2805419 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.834544] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370066, 'name': Rename_Task, 'duration_secs': 0.148247} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.835182] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 879.835428] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2196dc0-e05b-4238-9899-51cc6508917f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.839421] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 879.839421] env[62923]: value = "task-1370070" [ 879.839421] env[62923]: _type = "Task" [ 879.839421] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.843937] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 879.843937] env[62923]: value = "task-1370071" [ 879.843937] env[62923]: _type = "Task" [ 879.843937] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.852572] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370070, 'name': Rename_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.852572] env[62923]: DEBUG nova.policy [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '581e93fe11e74e1ead4a802dd4739d3b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '118fb52b52ea44aabbdbf8cb21e6ebf2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 879.859322] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370071, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.859806] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a9694448-70fd-4fe8-b066-a6a47184c0f1 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "7c3edceb-cc58-4925-a97a-3204936c836d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.134s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.881554] env[62923]: DEBUG oslo_concurrency.lockutils [req-4460a2ee-61f9-48af-94c1-737cd862ef08 req-dbc9be5d-e73d-4b92-9563-b31dab96e87b service nova] Releasing lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.995974] env[62923]: INFO nova.compute.manager [-] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Took 1.43 seconds to deallocate network for instance. [ 880.078733] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]523fc25b-bbe8-5fa3-1ca4-14be28ac87d0, 'name': SearchDatastore_Task, 'duration_secs': 0.01466} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.079701] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a73abd1-f941-416f-8686-63f48708677d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.086765] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 880.086765] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d877f5-b9c3-a012-38bf-5c04bc7cd9be" [ 880.086765] env[62923]: _type = "Task" [ 880.086765] env[62923]: } to complete. 
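The nova.policy entry above is an oslo.policy authorization over the request credentials shown: `network:attach_external_network` fails for a member/reader token. A standalone reproduction of that kind of check looks roughly like this; the check string is simplified (Nova registers its real defaults in code), and the credentials are trimmed to the fields the rule consults.

```python
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
# Simplified stand-in for Nova's registered default for this rule.
enforcer.register_default(policy.RuleDefault(
    "network:attach_external_network", "role:admin"))

creds = {"roles": ["member", "reader"],
         "project_id": "118fb52b52ea44aabbdbf8cb21e6ebf2"}
allowed = enforcer.authorize("network:attach_external_network",
                             {}, creds, do_raise=False)
print(allowed)   # False for a member/reader token, as logged above
```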
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.096775] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d877f5-b9c3-a012-38bf-5c04bc7cd9be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.171815] env[62923]: DEBUG oslo_vmware.api [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370064, 'name': PowerOnVM_Task, 'duration_secs': 0.585878} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.172236] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 880.172236] env[62923]: INFO nova.compute.manager [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Took 13.72 seconds to spawn the instance on the hypervisor. [ 880.172602] env[62923]: DEBUG nova.compute.manager [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 880.173331] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7485987e-4e54-4616-b6b2-97a3dab76036 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.187111] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.187359] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.292596] env[62923]: DEBUG nova.compute.manager [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 880.312903] env[62923]: DEBUG oslo_concurrency.lockutils [None req-318f7afc-595d-4413-8b0b-cb31521a5344 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.026s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.317779] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.918s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.318191] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.320373] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.775s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.321792] env[62923]: INFO nova.compute.claims [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.327823] env[62923]: DEBUG nova.network.neutron [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Successfully created port: d4711cea-8b7d-47e1-b973-510e3e511a8d {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 880.355868] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370070, 'name': Rename_Task, 'duration_secs': 0.209056} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.356612] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 880.356883] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aab989a2-7350-48a7-9d8d-98b24aa071c8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.361866] env[62923]: DEBUG oslo_vmware.api [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370071, 'name': PowerOnVM_Task, 'duration_secs': 0.496882} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.362446] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 880.362649] env[62923]: INFO nova.compute.manager [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Took 9.73 seconds to spawn the instance on the hypervisor. [ 880.362854] env[62923]: DEBUG nova.compute.manager [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 880.363831] env[62923]: INFO nova.scheduler.client.report [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Deleted allocations for instance 92c59517-7e6f-45bd-8211-789a718d66d1 [ 880.365472] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6220eda-7d4e-477d-acd7-9decf22e9c40 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.371325] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 880.371325] env[62923]: value = "task-1370072" [ 880.371325] env[62923]: _type = "Task" [ 880.371325] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.384722] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370072, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.504391] env[62923]: DEBUG oslo_concurrency.lockutils [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.526019] env[62923]: DEBUG nova.objects.instance [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lazy-loading 'flavor' on Instance uuid 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 880.596312] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d877f5-b9c3-a012-38bf-5c04bc7cd9be, 'name': SearchDatastore_Task, 'duration_secs': 0.014664} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.596584] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.596837] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 534fa654-ed73-4518-bdc7-d1f981628fd8/534fa654-ed73-4518-bdc7-d1f981628fd8.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 880.597112] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8996af8-9f69-4e80-8d9b-af73f9b5dd4d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.603444] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 880.603444] env[62923]: value = "task-1370073" [ 880.603444] env[62923]: _type = "Task" [ 880.603444] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.610699] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370073, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.692284] env[62923]: DEBUG nova.compute.manager [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 880.703613] env[62923]: INFO nova.compute.manager [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Took 40.79 seconds to build instance. [ 880.781713] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc3d5961-101a-408e-8bb3-e3a8aeb7cf83 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "880cce70-5a0c-40a6-91b5-73d074feab6f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.844997] env[62923]: DEBUG nova.network.neutron [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Updated VIF entry in instance network info cache for port 9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 880.845422] env[62923]: DEBUG nova.network.neutron [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Updating instance_info_cache with network_info: [{"id": "9fbeeed0-1b0f-416c-ba95-d8d2e8fd84b3", "address": "fa:16:3e:f0:eb:fd", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": null, "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap9fbeeed0-1b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.879025] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0013188f-19f7-40aa-88f2-4c391fd48689 tempest-ServerShowV257Test-253681236 tempest-ServerShowV257Test-253681236-project-member] Lock "92c59517-7e6f-45bd-8211-789a718d66d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.506s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.892862] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370072, 'name': PowerOnVM_Task} progress 
is 89%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.893659] env[62923]: INFO nova.compute.manager [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Took 28.54 seconds to build instance. [ 881.031178] env[62923]: DEBUG oslo_concurrency.lockutils [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.031616] env[62923]: DEBUG oslo_concurrency.lockutils [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.031616] env[62923]: DEBUG nova.network.neutron [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.031838] env[62923]: DEBUG nova.objects.instance [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lazy-loading 'info_cache' on Instance uuid 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.114697] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370073, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492919} completed successfully. 
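Taken together, the entries around here trace the image-cache spawn path: SearchDatastore_Task checks for the cached VMDK, CopyVirtualDisk_Task copies it into the instance folder, and ExtendVirtualDisk_Task grows the root disk to the flavor size (the 1048576 KB extend below is the 1 GB root_gb of m1.nano). A condensed sketch of that flow, where `ops` bundles the datastore operations as callables and every helper is a hypothetical stand-in for the vmops/vm_util calls in the log:

```python
def spawn_root_disk(ops, datastore, image_id, instance_uuid, root_gb):
    """Condensed fetch-if-missing -> copy -> extend sequence."""
    cached = (f"[{datastore}] devstack-image-cache_base/"
              f"{image_id}/{image_id}.vmdk")
    target = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    if not ops.search(cached):            # SearchDatastore_Task
        ops.fetch_to_cache(image_id)      # only on a cache miss
    ops.copy_disk(cached, target)         # CopyVirtualDisk_Task
    ops.extend_disk(target, root_gb * 1024 * 1024)  # ExtendVirtualDisk_Task, KB
    return target
```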
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.114972] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 534fa654-ed73-4518-bdc7-d1f981628fd8/534fa654-ed73-4518-bdc7-d1f981628fd8.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 881.115199] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 881.117074] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c315bec7-d778-41fe-8acb-8acfba5e0e42 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.122548] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 881.122548] env[62923]: value = "task-1370074" [ 881.122548] env[62923]: _type = "Task" [ 881.122548] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.132915] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370074, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.208909] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc6666c1-03ce-4d16-adf1-c65bde95cc95 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "066da19f-daf0-44e3-8ae0-89f0c970cb92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.685s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.218148] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.306963] env[62923]: DEBUG nova.compute.manager [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 881.335894] env[62923]: DEBUG nova.virt.hardware [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 881.336575] env[62923]: DEBUG nova.virt.hardware [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 881.336575] env[62923]: DEBUG nova.virt.hardware [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 881.336730] env[62923]: DEBUG nova.virt.hardware [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 881.336877] env[62923]: DEBUG nova.virt.hardware [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 881.337018] env[62923]: DEBUG nova.virt.hardware [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 881.337245] env[62923]: DEBUG nova.virt.hardware [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 881.337413] env[62923]: DEBUG nova.virt.hardware [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 881.337582] env[62923]: DEBUG nova.virt.hardware [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 
tempest-ServerTagsTestJSON-102379845-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 881.338166] env[62923]: DEBUG nova.virt.hardware [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 881.338166] env[62923]: DEBUG nova.virt.hardware [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 881.341333] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246d3914-822b-48a0-a153-e774e8112c34 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.350509] env[62923]: DEBUG oslo_concurrency.lockutils [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] Releasing lock "refresh_cache-880cce70-5a0c-40a6-91b5-73d074feab6f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.350607] env[62923]: DEBUG nova.compute.manager [req-34aa0263-4670-40d0-a56d-6f447984c517 req-d3808d81-0049-4857-aef7-3b88f942c07d service nova] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Received event network-vif-deleted-ecac0071-434c-47b6-8739-8522443e6a35 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 881.352698] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3ff9f5-7871-422b-8330-43f5eb621c6c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.384694] env[62923]: DEBUG oslo_vmware.api [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370072, 'name': PowerOnVM_Task, 'duration_secs': 0.676988} completed successfully. 
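The nova.virt.hardware entries above pick a CPU topology by enumerating every sockets x cores x threads factorization of the flavor's vcpu count within the flavor/image maxima; with vcpus=1 and no limits set, the only candidate is 1:1:1, exactly as logged. A toy version of that enumeration (the 65536 defaults mirror the "limits were sockets=65536, cores=65536, threads=65536" entry):

```python
from collections import namedtuple

Topo = namedtuple("Topo", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate topologies whose product equals the vcpu count."""
    topos = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    topos.append(Topo(s, c, t))
    return topos

print(possible_topologies(1))   # [Topo(sockets=1, cores=1, threads=1)], as logged
```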
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.385350] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 881.385350] env[62923]: DEBUG nova.compute.manager [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 881.386203] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f71cca-af45-4143-be63-d2e6a28f5f5d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.398363] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f3f5dd78-d45a-427a-9ea9-54ddc34e1f08 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.246s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.533462] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aaea496-c91d-4e46-b1b2-6698ff2554c0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.537569] env[62923]: DEBUG nova.objects.base [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Object Instance<1fef5eb2-acb0-4d00-81a3-c270af7df0e8> lazy-loaded attributes: flavor,info_cache {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 881.544925] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45eed927-89ad-4eb2-91fc-cab0c82afdf9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.580248] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c16f37-fcc8-4494-a5b0-f5e39a059384 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.589014] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e702ba-ab15-4898-aaf4-a0cb8501e7d8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.605691] env[62923]: DEBUG nova.compute.provider_tree [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 881.633405] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370074, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082113} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.633735] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 881.635149] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb2b9fb-f6cb-4059-9186-16f907f0bf5f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.659404] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 534fa654-ed73-4518-bdc7-d1f981628fd8/534fa654-ed73-4518-bdc7-d1f981628fd8.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.661701] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-178dde70-61ff-4157-8cb8-3d176990a618 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.679549] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "0a9fdd83-3818-4831-90f9-9d30713961c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.679757] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "0a9fdd83-3818-4831-90f9-9d30713961c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.687234] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 881.687234] env[62923]: value = "task-1370075" [ 881.687234] env[62923]: _type = "Task" [ 881.687234] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.700632] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370075, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.906520] env[62923]: DEBUG oslo_concurrency.lockutils [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.133782] env[62923]: ERROR nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [req-eb66689f-ada2-4c9e-ac37-3ca6412ad002] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-eb66689f-ada2-4c9e-ac37-3ca6412ad002"}]} [ 882.164285] env[62923]: DEBUG nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 882.186675] env[62923]: DEBUG nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 882.186878] env[62923]: DEBUG nova.compute.provider_tree [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 882.189261] env[62923]: DEBUG nova.compute.manager [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 882.207807] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.211075] env[62923]: DEBUG nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 882.240441] env[62923]: DEBUG nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 882.505901] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92af01a6-4bb4-4413-b992-64cc33bbf3f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.516061] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-769a5b9e-43a7-4540-ac12-50df2dedb8d3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.557008] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23d6042-f7b5-4454-a83b-e03202ab9f40 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.570987] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ef7c67-b1d9-40ac-9b4e-ebb49423c718 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.587229] env[62923]: DEBUG nova.compute.provider_tree [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
882.705691] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370075, 'name': ReconfigVM_Task, 'duration_secs': 0.729371} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.705986] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 534fa654-ed73-4518-bdc7-d1f981628fd8/534fa654-ed73-4518-bdc7-d1f981628fd8.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 882.706696] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-587d8e34-a86c-4eec-a307-f28810346d20 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.710884] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.715479] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 882.715479] env[62923]: value = "task-1370077" [ 882.715479] env[62923]: _type = "Task" [ 882.715479] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.724204] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370077, 'name': Rename_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.794382] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "67a83e64-c8bd-499c-895a-11976d69195b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.794724] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "67a83e64-c8bd-499c-895a-11976d69195b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.794949] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "67a83e64-c8bd-499c-895a-11976d69195b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.795233] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "67a83e64-c8bd-499c-895a-11976d69195b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.795442] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "67a83e64-c8bd-499c-895a-11976d69195b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.799678] env[62923]: INFO nova.compute.manager [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Terminating instance [ 882.800544] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "refresh_cache-67a83e64-c8bd-499c-895a-11976d69195b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.800708] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquired lock "refresh_cache-67a83e64-c8bd-499c-895a-11976d69195b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.800875] env[62923]: DEBUG nova.network.neutron [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 
tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 882.954918] env[62923]: DEBUG nova.network.neutron [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance_info_cache with network_info: [{"id": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "address": "fa:16:3e:38:95:72", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb7d101-34", "ovs_interfaceid": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.961374] env[62923]: DEBUG nova.compute.manager [req-3db89c82-f17f-4e72-86b4-2ae6c7ee3139 req-032f8cae-1d10-412c-9a30-b3b28279e210 service nova] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Received event network-vif-plugged-d4711cea-8b7d-47e1-b973-510e3e511a8d {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.961539] env[62923]: DEBUG oslo_concurrency.lockutils [req-3db89c82-f17f-4e72-86b4-2ae6c7ee3139 req-032f8cae-1d10-412c-9a30-b3b28279e210 service nova] Acquiring lock "e6752138-5d66-469d-ac56-6bd169ad166e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.961751] env[62923]: DEBUG oslo_concurrency.lockutils [req-3db89c82-f17f-4e72-86b4-2ae6c7ee3139 req-032f8cae-1d10-412c-9a30-b3b28279e210 service nova] Lock "e6752138-5d66-469d-ac56-6bd169ad166e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.961913] env[62923]: DEBUG oslo_concurrency.lockutils [req-3db89c82-f17f-4e72-86b4-2ae6c7ee3139 req-032f8cae-1d10-412c-9a30-b3b28279e210 service nova] Lock "e6752138-5d66-469d-ac56-6bd169ad166e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.962352] env[62923]: DEBUG nova.compute.manager [req-3db89c82-f17f-4e72-86b4-2ae6c7ee3139 
req-032f8cae-1d10-412c-9a30-b3b28279e210 service nova] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] No waiting events found dispatching network-vif-plugged-d4711cea-8b7d-47e1-b973-510e3e511a8d {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 882.962576] env[62923]: WARNING nova.compute.manager [req-3db89c82-f17f-4e72-86b4-2ae6c7ee3139 req-032f8cae-1d10-412c-9a30-b3b28279e210 service nova] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Received unexpected event network-vif-plugged-d4711cea-8b7d-47e1-b973-510e3e511a8d for instance with vm_state building and task_state spawning. [ 883.076199] env[62923]: DEBUG nova.network.neutron [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Successfully updated port: d4711cea-8b7d-47e1-b973-510e3e511a8d {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 883.121626] env[62923]: ERROR nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [req-12c216d5-a856-4f70-9868-852594030b9b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-12c216d5-a856-4f70-9868-852594030b9b"}]} [ 883.153349] env[62923]: DEBUG nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 883.168323] env[62923]: DEBUG nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 883.168586] env[62923]: DEBUG nova.compute.provider_tree [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 883.182314] env[62923]: DEBUG nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 883.202859] env[62923]: DEBUG nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 883.234059] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370077, 'name': Rename_Task, 'duration_secs': 0.278476} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.234059] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 883.234059] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9a62b9c-2c69-4c54-a18a-8e5e8d5f69c4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.240981] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 883.240981] env[62923]: value = "task-1370078" [ 883.240981] env[62923]: _type = "Task" [ 883.240981] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.248968] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370078, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.323795] env[62923]: DEBUG nova.network.neutron [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 883.393246] env[62923]: DEBUG nova.network.neutron [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.421298] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e56bbfb-f54d-4f0c-920d-fa7a37591990 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.432394] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2950b3eb-a9c7-406d-84cd-d237716e1642 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.465900] env[62923]: DEBUG oslo_concurrency.lockutils [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.468753] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb86a6f-a02d-4841-8813-1e8ae4e72cce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.481318] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ec038e-f842-4f97-8069-173d8fb629d3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.503390] env[62923]: DEBUG nova.compute.provider_tree [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 883.578206] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Acquiring lock "refresh_cache-e6752138-5d66-469d-ac56-6bd169ad166e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.578607] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Acquired lock "refresh_cache-e6752138-5d66-469d-ac56-6bd169ad166e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.578825] env[62923]: DEBUG nova.network.neutron [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 
tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.751583] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370078, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.897981] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Releasing lock "refresh_cache-67a83e64-c8bd-499c-895a-11976d69195b" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.898673] env[62923]: DEBUG nova.compute.manager [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 883.898950] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 883.900207] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504765e9-cfc5-46d1-8fff-70ea6bd43479 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.909687] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.910019] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-170e4a2e-7311-40f1-9ddd-189a0afcce32 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.918075] env[62923]: DEBUG oslo_vmware.api [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 883.918075] env[62923]: value = "task-1370079" [ 883.918075] env[62923]: _type = "Task" [ 883.918075] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.929841] env[62923]: DEBUG oslo_vmware.api [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370079, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.969101] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 883.969434] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d921ba6-c88b-4ed1-ba52-c5a9e9477e75 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.977840] env[62923]: DEBUG oslo_vmware.api [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 883.977840] env[62923]: value = "task-1370080" [ 883.977840] env[62923]: _type = "Task" [ 883.977840] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.987843] env[62923]: DEBUG oslo_vmware.api [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370080, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.995597] env[62923]: DEBUG nova.compute.manager [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Stashing vm_state: active {{(pid=62923) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 884.024613] env[62923]: ERROR nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [req-68a795dc-35d2-48bc-8635-abe32e6e4f81] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-68a795dc-35d2-48bc-8635-abe32e6e4f81"}]} [ 884.043354] env[62923]: DEBUG nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 884.054688] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "f52f5912-d6e8-4da5-ac39-65bb065b6555" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.055479] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "f52f5912-d6e8-4da5-ac39-65bb065b6555" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.057300] env[62923]: DEBUG nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 884.057508] env[62923]: DEBUG nova.compute.provider_tree [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 884.070705] env[62923]: DEBUG nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 884.091904] env[62923]: DEBUG nova.scheduler.client.report [None 
req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 884.137622] env[62923]: DEBUG nova.network.neutron [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 884.255431] env[62923]: DEBUG oslo_vmware.api [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370078, 'name': PowerOnVM_Task, 'duration_secs': 0.871363} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.255707] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 884.255887] env[62923]: INFO nova.compute.manager [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Took 9.49 seconds to spawn the instance on the hypervisor. 
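The three ERROR/refresh cycles above (req-eb66689f-ada2-4c9e-ac37-3ca6412ad002, req-12c216d5-a856-4f70-9868-852594030b9b, req-68a795dc-35d2-48bc-8635-abe32e6e4f81) are Placement's optimistic concurrency control at work: every inventory PUT carries the resource provider generation the writer last read, a concurrent writer bumping that generation turns the PUT into a 409 "placement.concurrent_update", and the report client responds by re-reading inventories, aggregates and traits before retrying. A minimal sketch of that read/PUT/retry loop, assuming a hypothetical requests-style `client` and leaving out the real report client's ProviderTree caching:

```python
import time


class GenerationConflict(Exception):
    """Placement kept answering 409 placement.concurrent_update."""


def set_inventory(client, provider_uuid, inventory, max_retries=4):
    # Optimistic concurrency: the PUT carries the provider generation we
    # last read; if another writer bumped it in the meantime, Placement
    # rejects the write with 409 and we refresh + retry, mirroring the
    # report client's behaviour in the log above.
    for attempt in range(max_retries):
        url = f"/resource_providers/{provider_uuid}/inventories"
        current = client.get(url).json()
        payload = {
            "resource_provider_generation":
                current["resource_provider_generation"],
            "inventories": inventory,
        }
        resp = client.put(url, json=payload)
        if resp.status_code == 200:
            return resp.json()           # success bumps the generation
        if resp.status_code != 409:
            resp.raise_for_status()      # anything but a conflict is fatal
        time.sleep(0.1 * (attempt + 1))  # short backoff before re-reading
    raise GenerationConflict(provider_uuid)
```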
[ 884.256098] env[62923]: DEBUG nova.compute.manager [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 884.256891] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1803a02b-1834-4897-844f-903a351562ae {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.332510] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64f484b-5b17-46c1-9b0f-f006d93bc49c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.340633] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770d8890-4902-4156-ab6b-d13ef2c3e2eb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.374227] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8878e45-c120-42f5-8165-a1fe5e3393b6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.377940] env[62923]: DEBUG nova.network.neutron [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Updating instance_info_cache with network_info: [{"id": "d4711cea-8b7d-47e1-b973-510e3e511a8d", "address": "fa:16:3e:e0:72:34", "network": {"id": "b6152c91-fda4-490f-a63d-4bce21b4a9db", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-205683729-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "118fb52b52ea44aabbdbf8cb21e6ebf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4711cea-8b", "ovs_interfaceid": "d4711cea-8b7d-47e1-b973-510e3e511a8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.384687] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c1dc25-bd98-483a-b906-5626aefc3f1f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.398792] env[62923]: DEBUG nova.compute.provider_tree [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 884.428765] env[62923]: DEBUG oslo_vmware.api [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370079, 'name': PowerOffVM_Task, 'duration_secs': 0.236991} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.429030] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 884.429252] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 884.429457] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe4309ca-c7fa-4b73-8462-139e4e47a8f7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.453798] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 884.454095] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 884.454295] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Deleting the datastore file [datastore1] 67a83e64-c8bd-499c-895a-11976d69195b {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 884.454560] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e123912e-13e0-40a6-ab9d-3f6aa9f2f0c0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.461110] env[62923]: DEBUG oslo_vmware.api [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 884.461110] env[62923]: value = "task-1370082" [ 884.461110] env[62923]: _type = "Task" [ 884.461110] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.469148] env[62923]: DEBUG oslo_vmware.api [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370082, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.486067] env[62923]: DEBUG oslo_vmware.api [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370080, 'name': PowerOnVM_Task, 'duration_secs': 0.443207} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.486354] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 884.486540] env[62923]: DEBUG nova.compute.manager [None req-50ecf28b-d8c3-4c8d-8b54-7f74a2c6343e tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 884.487312] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e69026-3672-47d3-a021-b1fa17dd24aa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.515118] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.561029] env[62923]: DEBUG nova.compute.manager [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 884.776588] env[62923]: INFO nova.compute.manager [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Took 29.45 seconds to build instance. 
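Every VirtualMachine task invoked above (PowerOnVM_Task, PowerOffVM_Task, ReconfigVM_Task, Rename_Task, DeleteDatastoreFile_Task) is asynchronous on the vCenter side: the driver gets a task reference back immediately, and the "Waiting for the task: (returnval){ ... }" blocks plus the "progress is N% ... completed successfully" lines come from oslo.vmware polling the task's state on a fixed interval. A minimal sketch of the same poll-until-terminal loop, with a hypothetical `get_task_info` callable standing in for the real PropertyCollector read:

```python
import time

POLL_INTERVAL = 0.5  # the poller wakes up on a fixed interval


def wait_for_task(get_task_info, task_ref, timeout=300.0):
    """Poll a vSphere task until it reaches a terminal state.

    get_task_info is a hypothetical callable returning an object with
    .state ('queued' | 'running' | 'success' | 'error'), .progress and
    .error; the real driver reads TaskInfo via the PropertyCollector.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info                  # e.g. PowerOnVM_Task in 0.87s
        if info.state == "error":
            raise RuntimeError(info.error)
        # corresponds to the "progress is 66%" lines emitted while running
        print(f"Task {task_ref} progress is {info.progress}%")
        time.sleep(POLL_INTERVAL)
    raise TimeoutError(f"task {task_ref} did not complete in {timeout}s")
```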
[ 884.880705] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Releasing lock "refresh_cache-e6752138-5d66-469d-ac56-6bd169ad166e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.881048] env[62923]: DEBUG nova.compute.manager [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Instance network_info: |[{"id": "d4711cea-8b7d-47e1-b973-510e3e511a8d", "address": "fa:16:3e:e0:72:34", "network": {"id": "b6152c91-fda4-490f-a63d-4bce21b4a9db", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-205683729-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "118fb52b52ea44aabbdbf8cb21e6ebf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4711cea-8b", "ovs_interfaceid": "d4711cea-8b7d-47e1-b973-510e3e511a8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 884.881661] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:72:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7894814c-6be3-4b80-a08e-4a771bc05dd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd4711cea-8b7d-47e1-b973-510e3e511a8d', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.889762] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Creating folder: Project (118fb52b52ea44aabbdbf8cb21e6ebf2). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 884.890086] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ee08833-949b-43a4-82f9-d0842cfde4f9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.904845] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Created folder: Project (118fb52b52ea44aabbdbf8cb21e6ebf2) in parent group-v291405. 
[ 884.905051] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Creating folder: Instances. Parent ref: group-v291477. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 884.906072] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0930bdf8-38bb-492f-b077-1a1d7c81fdc6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.916882] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Created folder: Instances in parent group-v291477. [ 884.917166] env[62923]: DEBUG oslo.service.loopingcall [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.917380] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 884.917602] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-008c9d9f-4f2f-45d3-bbe0-2c318306cfaa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.939545] env[62923]: DEBUG nova.scheduler.client.report [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 96 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 884.939802] env[62923]: DEBUG nova.compute.provider_tree [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 96 to 97 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 884.939984] env[62923]: DEBUG nova.compute.provider_tree [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 884.950129] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 884.950129] env[62923]: value = "task-1370086" [ 884.950129] env[62923]: _type = "Task" [ 884.950129] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.959182] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370086, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.970921] env[62923]: DEBUG oslo_vmware.api [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370082, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.293405} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.971211] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 884.971407] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 884.971592] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 884.971771] env[62923]: INFO nova.compute.manager [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Took 1.07 seconds to destroy the instance on the hypervisor. [ 884.972039] env[62923]: DEBUG oslo.service.loopingcall [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.972257] env[62923]: DEBUG nova.compute.manager [-] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 884.972355] env[62923]: DEBUG nova.network.neutron [-] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 884.993352] env[62923]: DEBUG nova.network.neutron [-] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 885.002620] env[62923]: DEBUG nova.compute.manager [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Received event network-changed-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 885.002620] env[62923]: DEBUG nova.compute.manager [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Refreshing instance network info cache due to event network-changed-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 885.002620] env[62923]: DEBUG oslo_concurrency.lockutils [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] Acquiring lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.002863] env[62923]: DEBUG oslo_concurrency.lockutils [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] Acquired lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.003575] env[62923]: DEBUG nova.network.neutron [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Refreshing network info cache for port cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 885.081970] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.278538] env[62923]: DEBUG oslo_concurrency.lockutils [None req-83527a71-875e-4e33-94ae-d2ff8c04497d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "534fa654-ed73-4518-bdc7-d1f981628fd8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 39.396s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.445519] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.125s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.446077] env[62923]: DEBUG nova.compute.manager [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 885.449332] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.341s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.449585] env[62923]: DEBUG nova.objects.instance [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lazy-loading 'resources' on Instance uuid a616c7f0-8c39-4c08-a1a4-1d89e158d3c5 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 885.466372] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370086, 'name': CreateVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.506806] env[62923]: DEBUG nova.network.neutron [-] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.953996] env[62923]: DEBUG nova.compute.utils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 885.959022] env[62923]: DEBUG nova.compute.manager [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 885.959022] env[62923]: DEBUG nova.network.neutron [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 885.968936] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370086, 'name': CreateVM_Task, 'duration_secs': 0.603149} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.969100] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 885.970551] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.970599] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.970915] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 885.971469] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-704f81c0-b631-4417-9cd0-f749c996f7ac {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.976370] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Waiting for the task: (returnval){ [ 885.976370] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52fa54be-1bf9-3bd7-6a93-fd9032ba0b24" [ 885.976370] env[62923]: _type = "Task" [ 885.976370] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.984804] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52fa54be-1bf9-3bd7-6a93-fd9032ba0b24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.008551] env[62923]: INFO nova.compute.manager [-] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Took 1.04 seconds to deallocate network for instance. 
[ 886.089136] env[62923]: DEBUG nova.policy [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ad76ea94b62472fa3318cbbdb308ebe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d1559d2844647aba922cae8e9d992e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 886.215617] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9545d68-dc85-484a-8282-6b1a341f36f8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.224064] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72496eb4-bdb9-413d-a949-233d7fcf7c91 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.260835] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731ad5a5-a9f0-4481-958c-10814ef9b176 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.272019] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413d2bfa-2690-4757-9cda-34f877a33d70 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.283598] env[62923]: DEBUG nova.compute.provider_tree [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.462359] env[62923]: DEBUG nova.compute.manager [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 886.482177] env[62923]: DEBUG nova.network.neutron [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updated VIF entry in instance network info cache for port cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 886.482289] env[62923]: DEBUG nova.network.neutron [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updating instance_info_cache with network_info: [{"id": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "address": "fa:16:3e:59:5e:e2", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcea0b4ee-b6", "ovs_interfaceid": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.489330] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52fa54be-1bf9-3bd7-6a93-fd9032ba0b24, 'name': SearchDatastore_Task, 'duration_secs': 0.010835} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.489645] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.489870] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 886.491065] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.491065] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.491065] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 886.491065] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2f2da72-75f2-45e1-bcf6-37c11cf9ad49 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.499515] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 886.499707] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 886.500457] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81707a3e-a1d0-4f64-9db6-e3659f9c1bb4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.505568] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Waiting for the task: (returnval){ [ 886.505568] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525f8c24-337a-868a-933b-546dd4e8c539" [ 886.505568] env[62923]: _type = "Task" [ 886.505568] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.515179] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525f8c24-337a-868a-933b-546dd4e8c539, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.519442] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.788902] env[62923]: DEBUG nova.scheduler.client.report [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 886.860356] env[62923]: DEBUG nova.network.neutron [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Successfully created port: a126841e-2aec-49ea-b70c-e16e15f30bad {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 886.988718] env[62923]: DEBUG oslo_concurrency.lockutils [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] Releasing lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.989015] env[62923]: DEBUG nova.compute.manager [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Received event network-changed-d4711cea-8b7d-47e1-b973-510e3e511a8d {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 886.989212] env[62923]: DEBUG 
nova.compute.manager [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Refreshing instance network info cache due to event network-changed-d4711cea-8b7d-47e1-b973-510e3e511a8d. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 886.989432] env[62923]: DEBUG oslo_concurrency.lockutils [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] Acquiring lock "refresh_cache-e6752138-5d66-469d-ac56-6bd169ad166e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.989575] env[62923]: DEBUG oslo_concurrency.lockutils [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] Acquired lock "refresh_cache-e6752138-5d66-469d-ac56-6bd169ad166e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.989737] env[62923]: DEBUG nova.network.neutron [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Refreshing network info cache for port d4711cea-8b7d-47e1-b973-510e3e511a8d {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 887.016446] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525f8c24-337a-868a-933b-546dd4e8c539, 'name': SearchDatastore_Task, 'duration_secs': 0.009543} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.017258] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84cb78d3-0405-423b-826e-bc7d341ffc5d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.022439] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Waiting for the task: (returnval){ [ 887.022439] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]524a0709-70d7-24c4-ddcb-f67ebf83050a" [ 887.022439] env[62923]: _type = "Task" [ 887.022439] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.030264] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]524a0709-70d7-24c4-ddcb-f67ebf83050a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.178498] env[62923]: DEBUG nova.compute.manager [req-f2727652-686d-4444-8fb9-504c91f24e2c req-4e0d4a05-72a6-4828-ac1c-94d835afee32 service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Received event network-changed-e5f5c80e-b51d-4788-a346-d4ff5982fa57 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 887.179228] env[62923]: DEBUG nova.compute.manager [req-f2727652-686d-4444-8fb9-504c91f24e2c req-4e0d4a05-72a6-4828-ac1c-94d835afee32 service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Refreshing instance network info cache due to event network-changed-e5f5c80e-b51d-4788-a346-d4ff5982fa57. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 887.179228] env[62923]: DEBUG oslo_concurrency.lockutils [req-f2727652-686d-4444-8fb9-504c91f24e2c req-4e0d4a05-72a6-4828-ac1c-94d835afee32 service nova] Acquiring lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.179228] env[62923]: DEBUG oslo_concurrency.lockutils [req-f2727652-686d-4444-8fb9-504c91f24e2c req-4e0d4a05-72a6-4828-ac1c-94d835afee32 service nova] Acquired lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.179228] env[62923]: DEBUG nova.network.neutron [req-f2727652-686d-4444-8fb9-504c91f24e2c req-4e0d4a05-72a6-4828-ac1c-94d835afee32 service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Refreshing network info cache for port e5f5c80e-b51d-4788-a346-d4ff5982fa57 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 887.296084] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.846s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.298631] env[62923]: DEBUG oslo_concurrency.lockutils [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.979s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.299545] env[62923]: DEBUG nova.objects.instance [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lazy-loading 'resources' on Instance uuid 880cce70-5a0c-40a6-91b5-73d074feab6f {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.322083] env[62923]: INFO nova.scheduler.client.report [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleted allocations for instance a616c7f0-8c39-4c08-a1a4-1d89e158d3c5 [ 887.472833] env[62923]: DEBUG nova.compute.manager [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Start spawning 
the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 887.501323] env[62923]: DEBUG nova.virt.hardware [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 887.501590] env[62923]: DEBUG nova.virt.hardware [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 887.501756] env[62923]: DEBUG nova.virt.hardware [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 887.501942] env[62923]: DEBUG nova.virt.hardware [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 887.502108] env[62923]: DEBUG nova.virt.hardware [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 887.502264] env[62923]: DEBUG nova.virt.hardware [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 887.502472] env[62923]: DEBUG nova.virt.hardware [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 887.502633] env[62923]: DEBUG nova.virt.hardware [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 887.502802] env[62923]: DEBUG nova.virt.hardware [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 
tempest-ImagesTestJSON-863450638-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 887.502965] env[62923]: DEBUG nova.virt.hardware [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 887.503192] env[62923]: DEBUG nova.virt.hardware [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 887.504330] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ce4238-53a7-427d-bde7-6596dd2f91f4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.513985] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccae54bd-443e-4b9a-8a9a-cb3e2ce43b57 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.537514] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]524a0709-70d7-24c4-ddcb-f67ebf83050a, 'name': SearchDatastore_Task, 'duration_secs': 0.026909} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.537784] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.538170] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] e6752138-5d66-469d-ac56-6bd169ad166e/e6752138-5d66-469d-ac56-6bd169ad166e.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 887.538325] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cda6808-dd99-4c01-b6f5-31b71c7ece3c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.547702] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Waiting for the task: (returnval){ [ 887.547702] env[62923]: value = "task-1370088" [ 887.547702] env[62923]: _type = "Task" [ 887.547702] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.555876] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370088, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.802361] env[62923]: DEBUG nova.objects.instance [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lazy-loading 'numa_topology' on Instance uuid 880cce70-5a0c-40a6-91b5-73d074feab6f {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.834680] env[62923]: DEBUG oslo_concurrency.lockutils [None req-47ab7cba-009b-42b9-a292-90259e8f195d tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "a616c7f0-8c39-4c08-a1a4-1d89e158d3c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 14.347s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.872284] env[62923]: DEBUG nova.network.neutron [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Updated VIF entry in instance network info cache for port d4711cea-8b7d-47e1-b973-510e3e511a8d. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 887.872676] env[62923]: DEBUG nova.network.neutron [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Updating instance_info_cache with network_info: [{"id": "d4711cea-8b7d-47e1-b973-510e3e511a8d", "address": "fa:16:3e:e0:72:34", "network": {"id": "b6152c91-fda4-490f-a63d-4bce21b4a9db", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-205683729-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "118fb52b52ea44aabbdbf8cb21e6ebf2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4711cea-8b", "ovs_interfaceid": "d4711cea-8b7d-47e1-b973-510e3e511a8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.023229] env[62923]: DEBUG nova.network.neutron [req-f2727652-686d-4444-8fb9-504c91f24e2c req-4e0d4a05-72a6-4828-ac1c-94d835afee32 service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Updated VIF entry in instance network info cache for port e5f5c80e-b51d-4788-a346-d4ff5982fa57. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 888.023717] env[62923]: DEBUG nova.network.neutron [req-f2727652-686d-4444-8fb9-504c91f24e2c req-4e0d4a05-72a6-4828-ac1c-94d835afee32 service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Updating instance_info_cache with network_info: [{"id": "e5f5c80e-b51d-4788-a346-d4ff5982fa57", "address": "fa:16:3e:c8:44:3f", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5f5c80e-b5", "ovs_interfaceid": "e5f5c80e-b51d-4788-a346-d4ff5982fa57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.060784] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370088, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.305690] env[62923]: DEBUG nova.objects.base [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Object Instance<880cce70-5a0c-40a6-91b5-73d074feab6f> lazy-loaded attributes: resources,numa_topology {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 888.377060] env[62923]: DEBUG oslo_concurrency.lockutils [req-d5d880aa-df40-4661-beee-b49739bccbed req-be8190b2-67dc-4d88-8fbf-f383c64ff012 service nova] Releasing lock "refresh_cache-e6752138-5d66-469d-ac56-6bd169ad166e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.488912] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3be8fa-9d2c-4af9-bfb8-dae874253fe0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.497507] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17826db2-1445-4497-a9e7-5beb95cf12c0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.530045] env[62923]: DEBUG oslo_concurrency.lockutils [req-f2727652-686d-4444-8fb9-504c91f24e2c req-4e0d4a05-72a6-4828-ac1c-94d835afee32 service nova] Releasing lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.530283] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7218da9d-a532-4a12-a85b-8110f79fe1d9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.538821] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1852e857-e284-4d57-8429-7e13b09e97d4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.553887] env[62923]: DEBUG nova.compute.provider_tree [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.563807] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370088, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.810098} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.564686] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] e6752138-5d66-469d-ac56-6bd169ad166e/e6752138-5d66-469d-ac56-6bd169ad166e.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 888.564807] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 888.565063] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e9a180b-07b1-40d4-bfb9-9d549288214f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.572691] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Waiting for the task: (returnval){ [ 888.572691] env[62923]: value = "task-1370089" [ 888.572691] env[62923]: _type = "Task" [ 888.572691] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.580900] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370089, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.664168] env[62923]: DEBUG nova.network.neutron [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Successfully updated port: a126841e-2aec-49ea-b70c-e16e15f30bad {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 889.059690] env[62923]: DEBUG nova.scheduler.client.report [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 889.083705] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370089, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.170112] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "refresh_cache-60805eeb-8287-4064-9bd3-a7c6a21f40b5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.170222] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "refresh_cache-60805eeb-8287-4064-9bd3-a7c6a21f40b5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.170372] env[62923]: DEBUG nova.network.neutron [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 889.204971] env[62923]: DEBUG nova.compute.manager [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Received event network-vif-plugged-a126841e-2aec-49ea-b70c-e16e15f30bad {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 889.205344] env[62923]: DEBUG oslo_concurrency.lockutils [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] Acquiring lock "60805eeb-8287-4064-9bd3-a7c6a21f40b5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.205684] env[62923]: DEBUG oslo_concurrency.lockutils [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] Lock "60805eeb-8287-4064-9bd3-a7c6a21f40b5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.205989] env[62923]: DEBUG oslo_concurrency.lockutils [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] Lock "60805eeb-8287-4064-9bd3-a7c6a21f40b5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.206293] env[62923]: DEBUG nova.compute.manager [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] No waiting events found dispatching network-vif-plugged-a126841e-2aec-49ea-b70c-e16e15f30bad {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 889.206571] env[62923]: WARNING nova.compute.manager [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Received unexpected event network-vif-plugged-a126841e-2aec-49ea-b70c-e16e15f30bad for instance with vm_state building and task_state spawning. 
[ 889.206827] env[62923]: DEBUG nova.compute.manager [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Received event network-changed-a126841e-2aec-49ea-b70c-e16e15f30bad {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 889.207095] env[62923]: DEBUG nova.compute.manager [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Refreshing instance network info cache due to event network-changed-a126841e-2aec-49ea-b70c-e16e15f30bad. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 889.207397] env[62923]: DEBUG oslo_concurrency.lockutils [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] Acquiring lock "refresh_cache-60805eeb-8287-4064-9bd3-a7c6a21f40b5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.565536] env[62923]: DEBUG oslo_concurrency.lockutils [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.267s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.568286] env[62923]: DEBUG oslo_concurrency.lockutils [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.064s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.568444] env[62923]: DEBUG nova.objects.instance [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lazy-loading 'resources' on Instance uuid d8bed052-7d83-471f-a18f-67c4c16a1b4a {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.585022] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370089, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.536294} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.585022] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 889.585022] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fa361e-d2d9-4f06-a4a6-4e96b98b4cef {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.607522] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] e6752138-5d66-469d-ac56-6bd169ad166e/e6752138-5d66-469d-ac56-6bd169ad166e.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 889.608556] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdcdcaf7-609f-4022-a00c-b59fd06842fe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.631948] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Waiting for the task: (returnval){ [ 889.631948] env[62923]: value = "task-1370091" [ 889.631948] env[62923]: _type = "Task" [ 889.631948] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.646822] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370091, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.703443] env[62923]: DEBUG nova.network.neutron [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 889.899293] env[62923]: DEBUG nova.network.neutron [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Updating instance_info_cache with network_info: [{"id": "a126841e-2aec-49ea-b70c-e16e15f30bad", "address": "fa:16:3e:c7:d0:57", "network": {"id": "2beb4718-469b-47f0-94d1-7bc1c52e79db", "bridge": "br-int", "label": "tempest-ImagesTestJSON-615155455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1559d2844647aba922cae8e9d992e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa126841e-2a", "ovs_interfaceid": "a126841e-2aec-49ea-b70c-e16e15f30bad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.077527] env[62923]: DEBUG oslo_concurrency.lockutils [None req-21825192-3f9a-46cc-af9c-dad06cc0cb62 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "880cce70-5a0c-40a6-91b5-73d074feab6f" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 31.348s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.078389] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc3d5961-101a-408e-8bb3-e3a8aeb7cf83 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "880cce70-5a0c-40a6-91b5-73d074feab6f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 9.297s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.078636] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc3d5961-101a-408e-8bb3-e3a8aeb7cf83 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "880cce70-5a0c-40a6-91b5-73d074feab6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.078799] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc3d5961-101a-408e-8bb3-e3a8aeb7cf83 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "880cce70-5a0c-40a6-91b5-73d074feab6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.078963] env[62923]: DEBUG oslo_concurrency.lockutils [None 
req-cc3d5961-101a-408e-8bb3-e3a8aeb7cf83 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "880cce70-5a0c-40a6-91b5-73d074feab6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.080488] env[62923]: INFO nova.compute.manager [None req-cc3d5961-101a-408e-8bb3-e3a8aeb7cf83 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Terminating instance [ 890.083351] env[62923]: DEBUG nova.compute.manager [None req-cc3d5961-101a-408e-8bb3-e3a8aeb7cf83 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 890.083547] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cc3d5961-101a-408e-8bb3-e3a8aeb7cf83 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 890.083815] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0e1d71c-f75c-440c-9db9-5c3395d630c5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.094515] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8d579e-464c-417c-8736-b83385dc0e3c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.129106] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-cc3d5961-101a-408e-8bb3-e3a8aeb7cf83 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 880cce70-5a0c-40a6-91b5-73d074feab6f could not be found. [ 890.129321] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cc3d5961-101a-408e-8bb3-e3a8aeb7cf83 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 890.129477] env[62923]: INFO nova.compute.manager [None req-cc3d5961-101a-408e-8bb3-e3a8aeb7cf83 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 890.129736] env[62923]: DEBUG oslo.service.loopingcall [None req-cc3d5961-101a-408e-8bb3-e3a8aeb7cf83 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.132282] env[62923]: DEBUG nova.compute.manager [-] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 890.132388] env[62923]: DEBUG nova.network.neutron [-] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 890.142636] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370091, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.267747] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb26ec54-e8f4-4385-b8fd-d266bee26734 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.276605] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c44fea-97c2-48c0-ab26-445713c252da {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.312577] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8310ed99-7f87-4253-9108-7453d7b94abe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.321650] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ec0c6c-cbc3-45a3-9359-114ed44e2095 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.336772] env[62923]: DEBUG nova.compute.provider_tree [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.402480] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "refresh_cache-60805eeb-8287-4064-9bd3-a7c6a21f40b5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.402890] env[62923]: DEBUG nova.compute.manager [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Instance network_info: |[{"id": "a126841e-2aec-49ea-b70c-e16e15f30bad", "address": "fa:16:3e:c7:d0:57", "network": {"id": "2beb4718-469b-47f0-94d1-7bc1c52e79db", "bridge": "br-int", "label": "tempest-ImagesTestJSON-615155455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "2d1559d2844647aba922cae8e9d992e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa126841e-2a", "ovs_interfaceid": "a126841e-2aec-49ea-b70c-e16e15f30bad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 890.403502] env[62923]: DEBUG oslo_concurrency.lockutils [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] Acquired lock "refresh_cache-60805eeb-8287-4064-9bd3-a7c6a21f40b5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.403767] env[62923]: DEBUG nova.network.neutron [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Refreshing network info cache for port a126841e-2aec-49ea-b70c-e16e15f30bad {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.405683] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:d0:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6eaa481-1f92-4851-b98e-09ed0daad7cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a126841e-2aec-49ea-b70c-e16e15f30bad', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.414928] env[62923]: DEBUG oslo.service.loopingcall [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.416324] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 890.416493] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e8aa184-888c-466b-b1d6-0b4d07b9dfc3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.441037] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.441037] env[62923]: value = "task-1370092" [ 890.441037] env[62923]: _type = "Task" [ 890.441037] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.453126] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370092, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.644664] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370091, 'name': ReconfigVM_Task, 'duration_secs': 0.542028} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.645042] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Reconfigured VM instance instance-0000004e to attach disk [datastore1] e6752138-5d66-469d-ac56-6bd169ad166e/e6752138-5d66-469d-ac56-6bd169ad166e.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 890.645634] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f15fdd6-bcb8-4815-8460-23714898ac9f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.654881] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Waiting for the task: (returnval){ [ 890.654881] env[62923]: value = "task-1370093" [ 890.654881] env[62923]: _type = "Task" [ 890.654881] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.664364] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370093, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.841630] env[62923]: DEBUG nova.scheduler.client.report [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 890.867393] env[62923]: DEBUG nova.network.neutron [-] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.953014] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370092, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.166633] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370093, 'name': Rename_Task, 'duration_secs': 0.390036} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.166965] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 891.167274] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-379bcdec-8a5c-44bb-ab23-494df2d6474e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.173993] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Waiting for the task: (returnval){ [ 891.173993] env[62923]: value = "task-1370094" [ 891.173993] env[62923]: _type = "Task" [ 891.173993] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.182306] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370094, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.206309] env[62923]: DEBUG nova.network.neutron [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Updated VIF entry in instance network info cache for port a126841e-2aec-49ea-b70c-e16e15f30bad. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.207192] env[62923]: DEBUG nova.network.neutron [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Updating instance_info_cache with network_info: [{"id": "a126841e-2aec-49ea-b70c-e16e15f30bad", "address": "fa:16:3e:c7:d0:57", "network": {"id": "2beb4718-469b-47f0-94d1-7bc1c52e79db", "bridge": "br-int", "label": "tempest-ImagesTestJSON-615155455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1559d2844647aba922cae8e9d992e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa126841e-2a", "ovs_interfaceid": "a126841e-2aec-49ea-b70c-e16e15f30bad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.350027] env[62923]: DEBUG oslo_concurrency.lockutils [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.782s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.352934] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.135s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.354514] env[62923]: INFO nova.compute.claims [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 891.369864] env[62923]: INFO nova.compute.manager [-] [instance: 880cce70-5a0c-40a6-91b5-73d074feab6f] Took 1.24 seconds to deallocate network for instance. [ 891.378660] env[62923]: INFO nova.scheduler.client.report [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Deleted allocations for instance d8bed052-7d83-471f-a18f-67c4c16a1b4a [ 891.452520] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370092, 'name': CreateVM_Task, 'duration_secs': 0.709548} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.452762] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 891.453697] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.453697] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.453887] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 891.454163] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bad3d74-1e01-4039-a587-117fb02ce591 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.458578] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 891.458578] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c785e4-b9ce-c27a-39e6-41695bb7e768" [ 891.458578] env[62923]: _type = "Task" [ 891.458578] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.466946] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c785e4-b9ce-c27a-39e6-41695bb7e768, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.684336] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370094, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.709702] env[62923]: DEBUG oslo_concurrency.lockutils [req-46a78790-d198-4930-b549-8fe72a5da5a8 req-227777b5-f71a-4bab-9cc5-1353245ffa9d service nova] Releasing lock "refresh_cache-60805eeb-8287-4064-9bd3-a7c6a21f40b5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.886039] env[62923]: DEBUG oslo_concurrency.lockutils [None req-aa201836-cf0b-4f88-91c1-2f3f61c8e3d0 tempest-VolumesAdminNegativeTest-1105086217 tempest-VolumesAdminNegativeTest-1105086217-project-member] Lock "d8bed052-7d83-471f-a18f-67c4c16a1b4a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.485s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.970837] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c785e4-b9ce-c27a-39e6-41695bb7e768, 'name': SearchDatastore_Task, 'duration_secs': 0.019869} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.971194] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.971449] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 891.971700] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.971860] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.972068] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 891.972465] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1fd5fa8b-2657-486c-b118-91c0ae1c1073 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.981061] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.981262] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 891.982010] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ab00444-b5a1-4adf-b016-555b63bc609f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.987410] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 891.987410] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52968c58-6df4-1158-bb76-22bf9252bc28" [ 891.987410] env[62923]: _type = "Task" [ 891.987410] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.995325] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52968c58-6df4-1158-bb76-22bf9252bc28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.184737] env[62923]: DEBUG oslo_vmware.api [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370094, 'name': PowerOnVM_Task, 'duration_secs': 0.837186} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.185028] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 892.185304] env[62923]: INFO nova.compute.manager [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Took 10.88 seconds to spawn the instance on the hypervisor. 
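[editor's note] The run above traces the full vmwareapi spawn sequence for instance e6752138-5d66-469d-ac56-6bd169ad166e: ExtendVirtualDisk_Task, ReconfigVM_Task to attach the root vmdk, Rename_Task, then PowerOnVM_Task, each followed by repeated "Task: {'id': task-..., 'name': ...} progress is N%" polls from oslo_vmware's wait_for_task (api.py:397/434/444). Below is a minimal sketch of that polling loop for orientation only; the real oslo_vmware implementation wraps the poll in a looping call on a VMwareAPISession, and `fetch_task_info` here is a hypothetical callable (returning a (state, progress) pair for a task id), not an oslo_vmware API.

```python
# Simplified stand-in for the poll behind the "progress is N%" lines above.
# Assumption: fetch_task_info(task_id) -> (state, progress) is a hypothetical
# helper; oslo_vmware instead reads the task's TaskInfo via the session.
import time


class TaskFailed(Exception):
    """Raised when the vCenter task ends in an error state."""


def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300.0):
    """Poll a task until 'success' or 'error', logging progress each poll."""
    start = time.monotonic()
    while True:
        state, progress = fetch_task_info(task_id)
        if state == "success":
            # Matches the "completed successfully" + duration_secs log line.
            print(f"Task {task_id} completed successfully "
                  f"in {time.monotonic() - start:.3f}s")
            return
        if state == "error":
            raise TaskFailed(f"Task {task_id} failed at {progress}%")
        print(f"Task {task_id} progress is {progress}%")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task_id} not done in {timeout}s")
        time.sleep(interval)
```

As in the log, success is reported with a duration (e.g. PowerOnVM_Task's 'duration_secs': 0.837186) while intermediate polls only report percent progress.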
[ 892.185488] env[62923]: DEBUG nova.compute.manager [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 892.186324] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35439bae-b3ba-4706-b9bc-7e92a1e2a13d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.402303] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cc3d5961-101a-408e-8bb3-e3a8aeb7cf83 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "880cce70-5a0c-40a6-91b5-73d074feab6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.320s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.501865] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52968c58-6df4-1158-bb76-22bf9252bc28, 'name': SearchDatastore_Task, 'duration_secs': 0.010105} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.502702] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fb7a10c-37ef-49ed-b0fc-d6bac5288c4d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.513247] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 892.513247] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52afd60b-a05e-4205-d2ff-49e321aa794f" [ 892.513247] env[62923]: _type = "Task" [ 892.513247] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.521302] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52afd60b-a05e-4205-d2ff-49e321aa794f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.571021] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3a1356-389c-4c29-ab98-6f216b8d8e79 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.578790] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77ba7e8-907c-4e1f-bfc5-a0b737450757 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.610775] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d250e29-8c9d-4fe6-9fc2-3af6ebd602d5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.617114] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17e7a1b-f179-4a0b-9455-9f2788d8bd19 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.630246] env[62923]: DEBUG nova.compute.provider_tree [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.704241] env[62923]: INFO nova.compute.manager [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Took 34.33 seconds to build instance. [ 892.940061] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 892.940466] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Starting heal instance info cache {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 893.034613] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52afd60b-a05e-4205-d2ff-49e321aa794f, 'name': SearchDatastore_Task, 'duration_secs': 0.03781} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.034945] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.035290] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 60805eeb-8287-4064-9bd3-a7c6a21f40b5/60805eeb-8287-4064-9bd3-a7c6a21f40b5.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 893.035859] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e5c707c-a235-40b8-9ab5-bfea18d20164 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.044502] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 893.044502] env[62923]: value = "task-1370096" [ 893.044502] env[62923]: _type = "Task" [ 893.044502] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.051803] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370096, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.133641] env[62923]: DEBUG nova.scheduler.client.report [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 893.207676] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9a6e6e9f-f9a3-451d-a6fb-c74dd08a5941 tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Lock "e6752138-5d66-469d-ac56-6bd169ad166e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.905s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.444861] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Didn't find any instances for network info cache update. 
{{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 893.556976] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370096, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.605553] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.605789] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.641639] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.289s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.642932] env[62923]: DEBUG nova.compute.manager [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 893.645493] env[62923]: DEBUG oslo_concurrency.lockutils [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 11.740s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.646120] env[62923]: DEBUG nova.objects.instance [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62923) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 894.054428] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370096, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.853042} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.054730] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 60805eeb-8287-4064-9bd3-a7c6a21f40b5/60805eeb-8287-4064-9bd3-a7c6a21f40b5.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 894.054918] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 894.055543] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ddd181f1-ff7b-46b7-b64a-73d786a7c095 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.061598] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 894.061598] env[62923]: value = "task-1370097" [ 894.061598] env[62923]: _type = "Task" [ 894.061598] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.071953] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370097, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.108323] env[62923]: DEBUG nova.compute.manager [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 894.150124] env[62923]: DEBUG nova.compute.utils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 894.155483] env[62923]: DEBUG nova.compute.manager [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 894.155483] env[62923]: DEBUG nova.network.neutron [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 894.213325] env[62923]: DEBUG nova.policy [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd3732884d90b416597cfc499ebf82e53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a855374ba4624ee78230d07b85b2ab8b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 894.573416] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370097, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065009} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.573700] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 894.574500] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ff395b-7fdf-4186-bc65-b97a70413162 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.603405] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 60805eeb-8287-4064-9bd3-a7c6a21f40b5/60805eeb-8287-4064-9bd3-a7c6a21f40b5.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 894.603405] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12e771ee-32e8-49ac-a977-2b6904ff54e6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.630028] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 894.630028] env[62923]: value = "task-1370098" [ 894.630028] env[62923]: _type = "Task" [ 894.630028] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.631444] env[62923]: DEBUG nova.network.neutron [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Successfully created port: 70dafc2e-d2a9-49fa-ac00-d46b002927bf {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 894.637117] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370098, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.640988] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.659036] env[62923]: DEBUG nova.compute.manager [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 894.659885] env[62923]: DEBUG oslo_concurrency.lockutils [None req-af42d890-3748-44f8-8086-d7675249647c tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.665689] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.955s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.670222] env[62923]: INFO nova.compute.claims [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.934479] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Acquiring lock "e6752138-5d66-469d-ac56-6bd169ad166e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.934760] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Lock "e6752138-5d66-469d-ac56-6bd169ad166e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s 
{{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.934967] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Acquiring lock "e6752138-5d66-469d-ac56-6bd169ad166e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.935161] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Lock "e6752138-5d66-469d-ac56-6bd169ad166e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.935421] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Lock "e6752138-5d66-469d-ac56-6bd169ad166e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.937577] env[62923]: INFO nova.compute.manager [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Terminating instance [ 894.939396] env[62923]: DEBUG nova.compute.manager [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 894.939580] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 894.940422] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36c1c50-771a-4a13-aebb-ed51e2fb88dd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.949956] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 894.950225] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc73621b-fc30-4749-ba89-de086f9a51f1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.956858] env[62923]: DEBUG oslo_vmware.api [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Waiting for the task: (returnval){ [ 894.956858] env[62923]: value = "task-1370100" [ 894.956858] env[62923]: _type = "Task" [ 894.956858] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.964722] env[62923]: DEBUG oslo_vmware.api [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370100, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.138435] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370098, 'name': ReconfigVM_Task, 'duration_secs': 0.272911} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.139129] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 60805eeb-8287-4064-9bd3-a7c6a21f40b5/60805eeb-8287-4064-9bd3-a7c6a21f40b5.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 895.139983] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ca437ec-3005-40d6-a65a-b5059560f6a6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.148032] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 895.148032] env[62923]: value = "task-1370101" [ 895.148032] env[62923]: _type = "Task" [ 895.148032] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.157236] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370101, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.467024] env[62923]: DEBUG oslo_vmware.api [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370100, 'name': PowerOffVM_Task, 'duration_secs': 0.251143} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.467354] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 895.467475] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 895.467776] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41bd379f-fff7-4878-a5fd-83965c1a302b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.527557] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 895.528098] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 895.528098] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Deleting the datastore file [datastore1] e6752138-5d66-469d-ac56-6bd169ad166e {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.528486] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e5b5121-0ed0-47ff-b707-6d81c53ee0f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.535136] env[62923]: DEBUG oslo_vmware.api [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Waiting for the task: (returnval){ [ 895.535136] env[62923]: value = "task-1370103" [ 895.535136] env[62923]: _type = "Task" [ 895.535136] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.542893] env[62923]: DEBUG oslo_vmware.api [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370103, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.657164] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370101, 'name': Rename_Task, 'duration_secs': 0.155539} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.657502] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 895.657805] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-688b1f5c-2151-42f4-83a0-87b5b32d38a3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.663900] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 895.663900] env[62923]: value = "task-1370104" [ 895.663900] env[62923]: _type = "Task" [ 895.663900] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.672941] env[62923]: DEBUG nova.compute.manager [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 895.674920] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370104, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.701032] env[62923]: DEBUG nova.virt.hardware [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 895.701199] env[62923]: DEBUG nova.virt.hardware [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 895.701271] env[62923]: DEBUG nova.virt.hardware [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} 
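
The records immediately above and just below (the "Flavor pref"/"Image pref" lines through "Sorted desired topologies") trace nova.virt.hardware choosing a guest CPU topology: unset flavor and image limits or preferences are logged as 0:0:0, the maxima default to 65536, and for the 1-vCPU m1.nano flavor the only factorization is sockets=1, cores=1, threads=1. A minimal, illustrative sketch of that selection logic (not Nova's actual implementation) follows:

    # Minimal sketch of the topology selection the surrounding records
    # trace: unset limits (logged as 0:0:0) fall back to a 65536 ceiling,
    # every sockets*cores*threads factorization of the vCPU count within
    # those limits is enumerated, and candidates are ranked against the
    # preferred topology (also 0:0:0 here, so the first candidate wins).
    from dataclasses import dataclass
    from itertools import product

    @dataclass(frozen=True)
    class Topology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield every exact factorization of vcpus within the limits."""
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield Topology(s, c, t)

    def sort_by_preference(candidates, preferred):
        """Rank candidates; a 0 in the preference means 'no preference'."""
        def score(topo):
            return ((preferred.sockets not in (0, topo.sockets)),
                    (preferred.cores not in (0, topo.cores)),
                    (preferred.threads not in (0, topo.threads)))
        return sorted(candidates, key=score)

    # For the m1.nano flavor in these records (1 vCPU, no hints) the only
    # candidate is 1:1:1 -- matching "Got 1 possible topologies" below.
    best = sort_by_preference(possible_topologies(1), Topology(0, 0, 0))[0]
    assert best == Topology(1, 1, 1)
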
[ 895.701521] env[62923]: DEBUG nova.virt.hardware [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 895.701692] env[62923]: DEBUG nova.virt.hardware [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.701893] env[62923]: DEBUG nova.virt.hardware [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 895.702074] env[62923]: DEBUG nova.virt.hardware [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 895.702245] env[62923]: DEBUG nova.virt.hardware [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 895.702417] env[62923]: DEBUG nova.virt.hardware [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 895.702582] env[62923]: DEBUG nova.virt.hardware [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 895.702756] env[62923]: DEBUG nova.virt.hardware [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 895.703680] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ebccc1d-bd26-47d1-b6af-9274585691fc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.713947] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cfd389-fff1-4aa6-b6a6-d5f63eeb772c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.866866] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1651eff-743c-4c17-b666-21eb97e7322b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.874559] env[62923]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b7d8fb-ca3a-4b90-b6d3-ce8b6a01b3f0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.921049] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf46210-1db5-4725-9d70-ca8a48855928 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.930871] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbbe572-7fcc-4f76-8517-082ed8fdbcb8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.939394] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.939618] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.939760] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62923) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 895.950508] env[62923]: DEBUG nova.compute.provider_tree [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 896.044637] env[62923]: DEBUG oslo_vmware.api [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Task: {'id': task-1370103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158119} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.045424] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 896.045424] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 896.045424] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 896.045589] env[62923]: INFO nova.compute.manager [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 896.045810] env[62923]: DEBUG oslo.service.loopingcall [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.045999] env[62923]: DEBUG nova.compute.manager [-] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 896.046112] env[62923]: DEBUG nova.network.neutron [-] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 896.174549] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370104, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.372795] env[62923]: DEBUG nova.compute.manager [req-0be02120-6100-40cb-b2de-2fed6d6e2196 req-78ca2427-7a2b-4c4a-b16b-7a120db0a7fd service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Received event network-vif-plugged-70dafc2e-d2a9-49fa-ac00-d46b002927bf {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 896.373120] env[62923]: DEBUG oslo_concurrency.lockutils [req-0be02120-6100-40cb-b2de-2fed6d6e2196 req-78ca2427-7a2b-4c4a-b16b-7a120db0a7fd service nova] Acquiring lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.373496] env[62923]: DEBUG oslo_concurrency.lockutils [req-0be02120-6100-40cb-b2de-2fed6d6e2196 req-78ca2427-7a2b-4c4a-b16b-7a120db0a7fd service nova] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.373597] env[62923]: DEBUG oslo_concurrency.lockutils [req-0be02120-6100-40cb-b2de-2fed6d6e2196 req-78ca2427-7a2b-4c4a-b16b-7a120db0a7fd service nova] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.374339] env[62923]: DEBUG nova.compute.manager [req-0be02120-6100-40cb-b2de-2fed6d6e2196 req-78ca2427-7a2b-4c4a-b16b-7a120db0a7fd service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] No waiting events found dispatching network-vif-plugged-70dafc2e-d2a9-49fa-ac00-d46b002927bf {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 896.374339] env[62923]: WARNING nova.compute.manager [req-0be02120-6100-40cb-b2de-2fed6d6e2196 req-78ca2427-7a2b-4c4a-b16b-7a120db0a7fd service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Received unexpected event network-vif-plugged-70dafc2e-d2a9-49fa-ac00-d46b002927bf for instance with vm_state building and task_state spawning. 
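
The "No waiting events found" / "Received unexpected event" pair above is Nova's external-event plumbing: Neutron posts network-vif-plugged to the compute API, and the compute manager pops any registered waiter for that (instance, event) pair under the per-instance "-events" lock; if nothing registered interest yet (here the driver is still spawning the VM), the event is logged as unexpected and dropped. A hedged sketch of that waiter-registry pattern, with illustrative names rather than Nova's real classes:

    # Illustrative sketch, not Nova's actual code: a caller registers
    # interest in an external event before triggering the operation that
    # produces it; dispatch() pops the waiter and signals it, and the
    # no-waiter path corresponds to the WARNING in the records above.
    import threading

    class InstanceEventWaiters:
        def __init__(self):
            self._lock = threading.Lock()   # plays the "-events" lock's role
            self._waiters = {}              # (instance_uuid, event) -> Event

        def prepare(self, instance_uuid, event_name):
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter                   # caller blocks on waiter.wait()

        def dispatch(self, instance_uuid, event_name):
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                print(f"WARNING: unexpected event {event_name} "
                      f"for instance {instance_uuid}")
                return False
            waiter.set()
            return True

    waiters = InstanceEventWaiters()
    # Event arrives before anyone registered -> the warning path above.
    waiters.dispatch("3a40ada5", "network-vif-plugged-70dafc2e")
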
[ 896.411718] env[62923]: DEBUG nova.compute.manager [req-8d136ab7-dbeb-471c-89dd-6a5c95370587 req-f1f028b1-c889-4e2f-8d8b-15df53a7efd3 service nova] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Received event network-vif-deleted-d4711cea-8b7d-47e1-b973-510e3e511a8d {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 896.411975] env[62923]: INFO nova.compute.manager [req-8d136ab7-dbeb-471c-89dd-6a5c95370587 req-f1f028b1-c889-4e2f-8d8b-15df53a7efd3 service nova] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Neutron deleted interface d4711cea-8b7d-47e1-b973-510e3e511a8d; detaching it from the instance and deleting it from the info cache [ 896.412226] env[62923]: DEBUG nova.network.neutron [req-8d136ab7-dbeb-471c-89dd-6a5c95370587 req-f1f028b1-c889-4e2f-8d8b-15df53a7efd3 service nova] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.471352] env[62923]: ERROR nova.scheduler.client.report [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [req-6b95dc75-0b63-424d-9890-677943c30fdb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6b95dc75-0b63-424d-9890-677943c30fdb"}]} [ 896.488739] env[62923]: DEBUG nova.scheduler.client.report [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 896.502565] env[62923]: DEBUG nova.scheduler.client.report [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 896.502810] env[62923]: DEBUG nova.compute.provider_tree [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 896.513465] env[62923]: DEBUG nova.scheduler.client.report [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 896.531353] env[62923]: DEBUG nova.scheduler.client.report [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 896.673656] env[62923]: DEBUG oslo_vmware.api [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370104, 'name': PowerOnVM_Task, 'duration_secs': 0.59802} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.675904] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 896.676140] env[62923]: INFO nova.compute.manager [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Took 9.20 seconds to spawn the instance on the hypervisor. 
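
The 409 "placement.concurrent_update" ERROR above, followed by the refresh records and a later successful update that bumps the provider generation from 98 to 99, is Placement's optimistic concurrency control at work: every inventory PUT carries the provider generation the writer last saw, and a stale generation forces a re-read and retry instead of silently overwriting another writer. A rough sketch of that loop against the Placement HTTP API (the endpoint and token below are placeholders, and real Nova goes through keystoneauth sessions rather than raw requests):

    # Sketch of a generation-checked inventory update with retry on
    # placement.concurrent_update, under the assumptions stated above.
    import requests

    PLACEMENT = "http://placement.example/placement"   # placeholder endpoint
    HEADERS = {"X-Auth-Token": "TOKEN-PLACEHOLDER",
               "OpenStack-API-Version": "placement 1.26"}

    def set_inventory(provider_uuid, inventories, retries=3):
        url = f"{PLACEMENT}/resource_providers/{provider_uuid}"
        for _ in range(retries):
            # Re-read the provider to pick up the current generation.
            generation = requests.get(url, headers=HEADERS).json()["generation"]
            resp = requests.put(
                f"{url}/inventories", headers=HEADERS,
                json={"resource_provider_generation": generation,
                      "inventories": inventories})
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()          # body carries the new generation
            # Someone bumped the generation concurrently; loop and retry,
            # as the "Refreshing inventories ..." records above show.
        raise RuntimeError("placement.concurrent_update persisted after retries")

Generations make every writer prove it saw the latest provider state, which is why the failed PUT here is harmless: the report client simply refreshes its view and reapplies the change.
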
[ 896.676326] env[62923]: DEBUG nova.compute.manager [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 896.677345] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc6d5ab-4879-458d-9013-e0ef0b6861d9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.697688] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5577cd9f-4ec0-476d-bbe3-56a7d8427584 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.705491] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0900a244-8381-42df-8c82-29369817351d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.736671] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ba7130-37a8-4d8b-97fa-f41c9495fc6b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.744600] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3100fa-cd1a-44f2-9b48-2e58d62a574c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.758120] env[62923]: DEBUG nova.compute.provider_tree [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 896.866434] env[62923]: DEBUG nova.network.neutron [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Successfully updated port: 70dafc2e-d2a9-49fa-ac00-d46b002927bf {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.890143] env[62923]: DEBUG nova.network.neutron [-] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.915914] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32c14bc9-f881-4989-b84c-bda927bbd6c7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.924646] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d0ec05-b56b-436d-862e-4b8d932cd298 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.950845] env[62923]: DEBUG nova.compute.manager [req-8d136ab7-dbeb-471c-89dd-6a5c95370587 req-f1f028b1-c889-4e2f-8d8b-15df53a7efd3 service nova] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Detach interface failed, port_id=d4711cea-8b7d-47e1-b973-510e3e511a8d, reason: Instance e6752138-5d66-469d-ac56-6bd169ad166e could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 897.194728] env[62923]: INFO nova.compute.manager [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Took 21.68 seconds to build instance. [ 897.288148] env[62923]: DEBUG nova.scheduler.client.report [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 98 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 897.288421] env[62923]: DEBUG nova.compute.provider_tree [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 98 to 99 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 897.288611] env[62923]: DEBUG nova.compute.provider_tree [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 897.369957] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.370131] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquired lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.370274] env[62923]: DEBUG nova.network.neutron [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 
tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.391946] env[62923]: INFO nova.compute.manager [-] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Took 1.35 seconds to deallocate network for instance. [ 897.697698] env[62923]: DEBUG oslo_concurrency.lockutils [None req-abee94c1-ad39-4896-be5a-28d3c1055217 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "60805eeb-8287-4064-9bd3-a7c6a21f40b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.187s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.793842] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.128s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.794438] env[62923]: DEBUG nova.compute.manager [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 897.797044] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 13.282s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.898213] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.901067] env[62923]: DEBUG nova.network.neutron [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.939933] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 897.940130] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.037417] env[62923]: DEBUG nova.network.neutron [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Updating instance_info_cache with network_info: [{"id": "70dafc2e-d2a9-49fa-ac00-d46b002927bf", "address": "fa:16:3e:79:27:67", "network": {"id": "f9845a8d-f3e3-4080-8b11-bca02678b9c5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1137339094-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a855374ba4624ee78230d07b85b2ab8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70dafc2e-d2", "ovs_interfaceid": "70dafc2e-d2a9-49fa-ac00-d46b002927bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.300512] env[62923]: DEBUG nova.compute.utils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 898.304447] env[62923]: INFO nova.compute.claims [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 898.308170] env[62923]: DEBUG nova.compute.manager [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 898.308343] env[62923]: DEBUG nova.network.neutron [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 898.362395] env[62923]: DEBUG nova.policy [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e1b761abfd44661a6da62ba35ec442f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2948b6c7e6f04cf98b36777c2fc94fc1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 898.436015] env[62923]: DEBUG nova.compute.manager [req-6632b4ca-7b1c-4309-a7f0-cee20c8d4628 req-3bb79df7-b580-415d-a24b-702d1ff13270 service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Received event network-changed-70dafc2e-d2a9-49fa-ac00-d46b002927bf {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 898.436110] env[62923]: DEBUG nova.compute.manager [req-6632b4ca-7b1c-4309-a7f0-cee20c8d4628 req-3bb79df7-b580-415d-a24b-702d1ff13270 service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Refreshing instance network info cache due to event network-changed-70dafc2e-d2a9-49fa-ac00-d46b002927bf. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 898.436305] env[62923]: DEBUG oslo_concurrency.lockutils [req-6632b4ca-7b1c-4309-a7f0-cee20c8d4628 req-3bb79df7-b580-415d-a24b-702d1ff13270 service nova] Acquiring lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.539786] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Releasing lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.540171] env[62923]: DEBUG nova.compute.manager [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Instance network_info: |[{"id": "70dafc2e-d2a9-49fa-ac00-d46b002927bf", "address": "fa:16:3e:79:27:67", "network": {"id": "f9845a8d-f3e3-4080-8b11-bca02678b9c5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1137339094-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a855374ba4624ee78230d07b85b2ab8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70dafc2e-d2", "ovs_interfaceid": "70dafc2e-d2a9-49fa-ac00-d46b002927bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 898.540492] env[62923]: DEBUG oslo_concurrency.lockutils [req-6632b4ca-7b1c-4309-a7f0-cee20c8d4628 req-3bb79df7-b580-415d-a24b-702d1ff13270 service nova] Acquired lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.540680] env[62923]: DEBUG nova.network.neutron [req-6632b4ca-7b1c-4309-a7f0-cee20c8d4628 req-3bb79df7-b580-415d-a24b-702d1ff13270 service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Refreshing network info cache for port 70dafc2e-d2a9-49fa-ac00-d46b002927bf {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.541992] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:27:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1d25020-c621-4388-ac1d-de55bfefbe50', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70dafc2e-d2a9-49fa-ac00-d46b002927bf', 
'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.552209] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Creating folder: Project (a855374ba4624ee78230d07b85b2ab8b). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 898.555093] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51d19e74-9418-4039-9e60-f66e59112c9c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.564950] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Created folder: Project (a855374ba4624ee78230d07b85b2ab8b) in parent group-v291405. [ 898.565371] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Creating folder: Instances. Parent ref: group-v291482. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 898.565999] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d004b60-eb69-4f36-9d1c-1b8de968554a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.575859] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Created folder: Instances in parent group-v291482. [ 898.577696] env[62923]: DEBUG oslo.service.loopingcall [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.577696] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 898.577696] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74cb522a-1be2-4330-b156-6a8f753eda8c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.607841] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.607841] env[62923]: value = "task-1370108" [ 898.607841] env[62923]: _type = "Task" [ 898.607841] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.613971] env[62923]: DEBUG nova.network.neutron [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Successfully created port: 69601284-7be9-4b00-9fde-93089f7b51c8 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 898.619719] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370108, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.809011] env[62923]: DEBUG nova.compute.manager [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 898.813477] env[62923]: INFO nova.compute.resource_tracker [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating resource usage from migration 1e89a30e-5033-486f-a50a-1bde86bc2874 [ 898.940094] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 899.016287] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff25f3b-4637-4cb5-9af5-8f54d28da3e6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.023967] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b146ea77-9d09-497d-8b21-470e978a568e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.055836] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b278856-e12b-4664-92c3-c16608ac5353 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.063450] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1bc2b34-c16f-42d6-819c-23cd93b2fee1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.076642] env[62923]: DEBUG nova.compute.provider_tree [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.113442] env[62923]: DEBUG nova.compute.manager [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 899.117643] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f955f9-c891-4d86-a61e-afd844ee28ff {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.120043] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370108, 'name': CreateVM_Task, 'duration_secs': 0.389997} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.120198] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.121576] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.121761] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.122119] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 899.123960] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93a394d1-027c-4917-950c-27d34b805b98 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.135050] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 899.135050] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b98d5c-3d0c-cee0-3cde-2f17c3d61fdd" [ 899.135050] env[62923]: _type = "Task" [ 899.135050] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.142691] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b98d5c-3d0c-cee0-3cde-2f17c3d61fdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.407867] env[62923]: DEBUG nova.network.neutron [req-6632b4ca-7b1c-4309-a7f0-cee20c8d4628 req-3bb79df7-b580-415d-a24b-702d1ff13270 service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Updated VIF entry in instance network info cache for port 70dafc2e-d2a9-49fa-ac00-d46b002927bf. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 899.408272] env[62923]: DEBUG nova.network.neutron [req-6632b4ca-7b1c-4309-a7f0-cee20c8d4628 req-3bb79df7-b580-415d-a24b-702d1ff13270 service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Updating instance_info_cache with network_info: [{"id": "70dafc2e-d2a9-49fa-ac00-d46b002927bf", "address": "fa:16:3e:79:27:67", "network": {"id": "f9845a8d-f3e3-4080-8b11-bca02678b9c5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1137339094-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a855374ba4624ee78230d07b85b2ab8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70dafc2e-d2", "ovs_interfaceid": "70dafc2e-d2a9-49fa-ac00-d46b002927bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.582015] env[62923]: DEBUG nova.scheduler.client.report [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 899.632748] env[62923]: INFO nova.compute.manager [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] instance snapshotting [ 899.638659] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae65132-9ff7-4750-ab6c-14633f6095e8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.647064] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b98d5c-3d0c-cee0-3cde-2f17c3d61fdd, 'name': SearchDatastore_Task, 'duration_secs': 0.010683} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.659558] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.660038] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.660126] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.660255] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.663824] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.663824] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4101b6bf-473e-4ef0-a002-bd12a1ab11ae {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.663824] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f30dfe-b7ad-4cd9-b63c-d35479a91a51 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.673030] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.673222] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.674119] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e35b782-f7c4-4e00-b850-adc1318942bf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.682019] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 899.682019] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52171ad4-c0d3-b905-37c0-ada71f9065d1" [ 899.682019] env[62923]: _type = "Task" [ 899.682019] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.687893] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52171ad4-c0d3-b905-37c0-ada71f9065d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.823071] env[62923]: DEBUG nova.compute.manager [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 899.843836] env[62923]: DEBUG nova.virt.hardware [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 899.844117] env[62923]: DEBUG nova.virt.hardware [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 899.844301] env[62923]: DEBUG nova.virt.hardware [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 899.844505] env[62923]: DEBUG nova.virt.hardware [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor pref 0:0:0 {{(pid=62923) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 899.844671] env[62923]: DEBUG nova.virt.hardware [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 899.844821] env[62923]: DEBUG nova.virt.hardware [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 899.845040] env[62923]: DEBUG nova.virt.hardware [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 899.845209] env[62923]: DEBUG nova.virt.hardware [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 899.845407] env[62923]: DEBUG nova.virt.hardware [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 899.845578] env[62923]: DEBUG nova.virt.hardware [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 899.845761] env[62923]: DEBUG nova.virt.hardware [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 899.847184] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9adea6d9-abb8-4069-9b12-4921ac5698ae {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.855848] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e3b2d2-ae05-4ea8-9e04-747d0aff19eb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.911047] env[62923]: DEBUG oslo_concurrency.lockutils [req-6632b4ca-7b1c-4309-a7f0-cee20c8d4628 req-3bb79df7-b580-415d-a24b-702d1ff13270 service nova] Releasing lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.940018] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62923) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 899.940212] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 899.940510] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 900.085725] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.289s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.086227] env[62923]: INFO nova.compute.manager [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Migrating [ 900.086537] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.086698] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.087985] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.006s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.091485] env[62923]: INFO nova.compute.claims [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 900.151732] env[62923]: DEBUG nova.network.neutron [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Successfully updated port: 69601284-7be9-4b00-9fde-93089f7b51c8 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 900.175156] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Creating Snapshot of the VM instance {{(pid=62923) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 
900.175482] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1097a535-25e3-4832-8d28-0e69a8551d2a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.186429] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 900.186429] env[62923]: value = "task-1370109" [ 900.186429] env[62923]: _type = "Task" [ 900.186429] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.193753] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52171ad4-c0d3-b905-37c0-ada71f9065d1, 'name': SearchDatastore_Task, 'duration_secs': 0.009235} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.194979] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-deabb2fc-1c8f-4526-ab3e-87508b98b6c3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.200313] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370109, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.203485] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 900.203485] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e96a16-3109-20b1-9e57-32d0c2ca2463" [ 900.203485] env[62923]: _type = "Task" [ 900.203485] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.211214] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e96a16-3109-20b1-9e57-32d0c2ca2463, 'name': SearchDatastore_Task} progress is 0%. 
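The wait_for_task / _poll_task pairs that repeat through this section all follow one pattern: submit a vCenter task, then poll it until it reports completion, logging progress on each cycle. Below is a minimal stand-in for that loop; FakeTask and the poll interval are illustrative, not oslo.vmware's real API.

import time

class FakeTask:
    """Illustrative stand-in for a vCenter task handle."""
    def __init__(self, name):
        self.name = name
        self.progress = 0

    def poll(self):
        # A real poll reads the Task object's progress via the
        # PropertyCollector; here we just advance a counter.
        self.progress = min(self.progress + 50, 100)
        return self.progress

def wait_for_task(task, interval=0.5):
    while True:
        progress = task.poll()
        print(f"Task: {task.name} progress is {progress}%.")
        if progress == 100:
            print(f"Task {task.name} completed successfully.")
            return
        time.sleep(interval)

wait_for_task(FakeTask("SearchDatastore_Task"))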
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.347257] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Acquiring lock "65000ac5-1c28-4abe-bc96-c440f0b14d3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.347532] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Lock "65000ac5-1c28-4abe-bc96-c440f0b14d3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.444273] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.461494] env[62923]: DEBUG nova.compute.manager [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Received event network-vif-plugged-69601284-7be9-4b00-9fde-93089f7b51c8 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 900.461719] env[62923]: DEBUG oslo_concurrency.lockutils [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] Acquiring lock "0a9fdd83-3818-4831-90f9-9d30713961c5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.461967] env[62923]: DEBUG oslo_concurrency.lockutils [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] Lock "0a9fdd83-3818-4831-90f9-9d30713961c5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.462100] env[62923]: DEBUG oslo_concurrency.lockutils [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] Lock "0a9fdd83-3818-4831-90f9-9d30713961c5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.462259] env[62923]: DEBUG nova.compute.manager [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] No waiting events found dispatching network-vif-plugged-69601284-7be9-4b00-9fde-93089f7b51c8 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 900.462580] env[62923]: WARNING nova.compute.manager [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Received unexpected
event network-vif-plugged-69601284-7be9-4b00-9fde-93089f7b51c8 for instance with vm_state building and task_state spawning. [ 900.462708] env[62923]: DEBUG nova.compute.manager [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Received event network-changed-69601284-7be9-4b00-9fde-93089f7b51c8 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 900.462871] env[62923]: DEBUG nova.compute.manager [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Refreshing instance network info cache due to event network-changed-69601284-7be9-4b00-9fde-93089f7b51c8. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 900.463064] env[62923]: DEBUG oslo_concurrency.lockutils [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] Acquiring lock "refresh_cache-0a9fdd83-3818-4831-90f9-9d30713961c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.463207] env[62923]: DEBUG oslo_concurrency.lockutils [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] Acquired lock "refresh_cache-0a9fdd83-3818-4831-90f9-9d30713961c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.463368] env[62923]: DEBUG nova.network.neutron [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Refreshing network info cache for port 69601284-7be9-4b00-9fde-93089f7b51c8 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 900.593941] env[62923]: INFO nova.compute.rpcapi [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 900.594547] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.653353] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "refresh_cache-0a9fdd83-3818-4831-90f9-9d30713961c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.697946] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370109, 'name': CreateSnapshot_Task} progress is 0%. 
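The WARNING above is a benign race that the surrounding records explain: Neutron's network-vif-plugged callback arrived before the spawning thread registered a waiter, so pop_instance_event found nothing to dispatch to. A reduced pop-or-warn sketch follows; the class and method names are illustrative, not nova's internals.

import threading
from collections import defaultdict

class InstanceEvents:
    """Per-instance registry of threads waiting for external events."""
    def __init__(self):
        self._waiters = defaultdict(dict)   # instance uuid -> {event: Event}
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters[instance_uuid].pop(event_name, None)

events = InstanceEvents()
waiter = events.pop("0a9fdd83-3818-4831-90f9-9d30713961c5",
                    "network-vif-plugged-69601284-7be9-4b00-9fde-93089f7b51c8")
if waiter is None:
    # Nothing was waiting yet: the spawn is still in progress.
    print("No waiting events found; received unexpected event while "
          "vm_state is building and task_state is spawning.")
else:
    waiter.set()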
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.712460] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e96a16-3109-20b1-9e57-32d0c2ca2463, 'name': SearchDatastore_Task, 'duration_secs': 0.051925} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.712727] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.712985] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7/3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 900.713407] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42cad0ec-0af8-4184-ad03-7b5231a0f304 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.720044] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 900.720044] env[62923]: value = "task-1370110" [ 900.720044] env[62923]: _type = "Task" [ 900.720044] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.728109] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370110, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.850054] env[62923]: DEBUG nova.compute.manager [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 901.005114] env[62923]: DEBUG nova.network.neutron [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Instance cache missing network info. 
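The records above trace the cache-backed provisioning path: lock the image-cache entry, run SearchDatastore_Task to see whether the VMDK is already cached, then copy it into the instance's own folder. Below is a local-filesystem analogue of that check-then-copy step; the paths mirror the log, the rest is illustrative.

import shutil
from pathlib import Path

image_id = "cd84cf13-77b9-4bc1-bb15-31bece605a8e"
instance_uuid = "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7"

cache = Path("devstack-image-cache_base") / image_id / f"{image_id}.vmdk"
target = Path(instance_uuid) / f"{instance_uuid}.vmdk"

target.parent.mkdir(parents=True, exist_ok=True)  # FileManager.MakeDirectory analogue
if cache.exists():                                # SearchDatastore_Task analogue
    shutil.copy(cache, target)                    # CopyVirtualDisk_Task analogue
else:
    print(f"Image {image_id} not cached; it would be fetched from Glance first.")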
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 901.088045] env[62923]: DEBUG nova.network.neutron [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.114771] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.114771] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.115365] env[62923]: DEBUG nova.network.neutron [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.201195] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370109, 'name': CreateSnapshot_Task, 'duration_secs': 0.68194} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.203842] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Created Snapshot of the VM instance {{(pid=62923) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 901.204784] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7930311-51dd-4ded-87b1-58ff6a4f7054 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.230218] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370110, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48433} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.230483] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7/3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 901.230699] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 901.230944] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f930ede5-a6aa-4f65-9e97-27754961d2b5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.238407] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 901.238407] env[62923]: value = "task-1370111" [ 901.238407] env[62923]: _type = "Task" [ 901.238407] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.247294] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370111, 'name': ExtendVirtualDisk_Task} progress is 0%. 
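The 1048576 in the ExtendVirtualDisk_Task record above is the m1.nano flavor's root_gb=1 expressed in kibibytes, the unit the extend call takes:

root_gb = 1                       # m1.nano root disk, from the flavor logged above
size_kb = root_gb * 1024 * 1024   # GiB -> KiB
assert size_kb == 1048576         # "Extending root virtual disk to 1048576"
print(size_kb)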
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.311257] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7bfbe11-403d-4249-90c5-f3b926aad3dd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.318944] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e53852-e1ff-4343-92c2-cfb2e0a055bb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.347092] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33caebb-7894-4956-9426-e6932d097606 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.353502] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5949b0c-734c-4b29-9690-553b0a4400af {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.368745] env[62923]: DEBUG nova.compute.provider_tree [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.370854] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.590818] env[62923]: DEBUG oslo_concurrency.lockutils [req-8a5aa6d5-31f4-470c-94be-d262d41e2ccb req-510a1a47-0113-4180-980a-6b7aab4e4929 service nova] Releasing lock "refresh_cache-0a9fdd83-3818-4831-90f9-9d30713961c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.591255] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "refresh_cache-0a9fdd83-3818-4831-90f9-9d30713961c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.591413] env[62923]: DEBUG nova.network.neutron [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.724899] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Creating linked-clone VM from snapshot {{(pid=62923) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 901.725220] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with 
opID=oslo.vmware-1eba54a3-5ef4-429b-9e2d-7b9a720bd995 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.733396] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 901.733396] env[62923]: value = "task-1370112" [ 901.733396] env[62923]: _type = "Task" [ 901.733396] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.740827] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370112, 'name': CloneVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.751576] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063281} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.751818] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 901.752620] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a4ff4f-c9ca-4186-9f7d-378c3e6b3ef6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.773399] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7/3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 901.773657] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9527ed9-9878-4de5-9914-6bf8196c16f8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.793119] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 901.793119] env[62923]: value = "task-1370113" [ 901.793119] env[62923]: _type = "Task" [ 901.793119] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.800482] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370113, 'name': ReconfigVM_Task} progress is 5%. 
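The ReconfigVM_Task above attaches the copied VMDK to instance-00000050 as a sparse disk. Sketched here as plain dicts; the real call builds vSphere SOAP device specs, and the controller key and dict keys below are only illustrative.

attach_spec = {
    "operation": "add",
    "device": {
        "type": "VirtualDisk",
        "backing": {
            "fileName": "[datastore1] 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7/"
                        "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7.vmdk",
            "diskType": "sparse",    # "with type sparse" in the record above
        },
        "controllerKey": 1000,       # hypothetical SCSI controller key
        "unitNumber": 0,
    },
}
print(attach_spec["device"]["backing"]["fileName"])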
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.850196] env[62923]: DEBUG nova.network.neutron [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating instance_info_cache with network_info: [{"id": "0eb9a757-0625-4e00-a9b0-55888eb57e7b", "address": "fa:16:3e:57:cc:02", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb9a757-06", "ovs_interfaceid": "0eb9a757-0625-4e00-a9b0-55888eb57e7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.872926] env[62923]: DEBUG nova.scheduler.client.report [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 902.122424] env[62923]: DEBUG nova.network.neutron [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 902.243724] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370112, 'name': CloneVM_Task} progress is 94%. 
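The inventory dict above determines what placement will hand out for provider a513b783-544c-421b-85ec-cfd6d6ee698d: usable capacity per resource class is (total - reserved) * allocation_ratio. Worked out from the logged values:

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0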
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.251117] env[62923]: DEBUG nova.network.neutron [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Updating instance_info_cache with network_info: [{"id": "69601284-7be9-4b00-9fde-93089f7b51c8", "address": "fa:16:3e:02:29:77", "network": {"id": "9ed96510-533e-4ed6-bf9b-e1a401a9df79", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060581969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2948b6c7e6f04cf98b36777c2fc94fc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69601284-7b", "ovs_interfaceid": "69601284-7be9-4b00-9fde-93089f7b51c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.303506] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370113, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.352706] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.377979] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.290s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.378837] env[62923]: DEBUG nova.compute.manager [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Start building networks asynchronously for instance. 
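The instance_info_cache payloads above are lists of VIF dicts; device name, MAC and fixed IPs all ride in that one structure. Extracting them from the entry just logged for instance 0a9fdd83, trimmed to the fields used:

vif = {
    "id": "69601284-7be9-4b00-9fde-93089f7b51c8",
    "address": "fa:16:3e:02:29:77",
    "devname": "tap69601284-7b",
    "network": {
        "bridge": "br-int",
        "subnets": [{"cidr": "192.168.128.0/28",
                     "ips": [{"address": "192.168.128.12"}]}],
    },
}
fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
print(vif["devname"], vif["address"], fixed_ips)
# -> tap69601284-7b fa:16:3e:02:29:77 ['192.168.128.12']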
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 902.382674] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.863s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.382986] env[62923]: DEBUG nova.objects.instance [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lazy-loading 'resources' on Instance uuid 67a83e64-c8bd-499c-895a-11976d69195b {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 902.744554] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370112, 'name': CloneVM_Task} progress is 94%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.753231] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "refresh_cache-0a9fdd83-3818-4831-90f9-9d30713961c5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.753558] env[62923]: DEBUG nova.compute.manager [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Instance network_info: |[{"id": "69601284-7be9-4b00-9fde-93089f7b51c8", "address": "fa:16:3e:02:29:77", "network": {"id": "9ed96510-533e-4ed6-bf9b-e1a401a9df79", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060581969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2948b6c7e6f04cf98b36777c2fc94fc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69601284-7b", "ovs_interfaceid": "69601284-7be9-4b00-9fde-93089f7b51c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 902.754076] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:29:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ba07329-1d3e-4ba8-8774-d029262318c4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'69601284-7be9-4b00-9fde-93089f7b51c8', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 902.762107] env[62923]: DEBUG oslo.service.loopingcall [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 902.762369] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 902.762617] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfd39fef-8630-4c5a-8aeb-7473910d506f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.781751] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 902.781751] env[62923]: value = "task-1370114" [ 902.781751] env[62923]: _type = "Task" [ 902.781751] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.789258] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370114, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.803958] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370113, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.885659] env[62923]: DEBUG nova.compute.utils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 902.890078] env[62923]: DEBUG nova.compute.manager [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Allocating IP information in the background. 
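The "Instance VIF info" record above is the hand-off from the neutron model to the vSphere spec: the NSX logical switch id from the port's binding details becomes an OpaqueNetwork reference. A reduced mapping sketch, not the driver's actual code, fed with the fields logged for port 69601284:

def vif_info_from_neutron(vif):
    # Mirrors the fields visible in the "Instance VIF info" record above.
    return {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    }

vif = {
    "id": "69601284-7be9-4b00-9fde-93089f7b51c8",
    "address": "fa:16:3e:02:29:77",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4"},
}
print(vif_info_from_neutron(vif))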
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 902.890368] env[62923]: DEBUG nova.network.neutron [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 903.603012] env[62923]: DEBUG nova.policy [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c37debff078b4389813658cbad297e65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0db41047d1004a1d9ca7f663178058da', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 903.608848] env[62923]: DEBUG nova.compute.manager [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 903.631572] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370114, 'name': CreateVM_Task, 'duration_secs': 0.303458} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.632545] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370112, 'name': CloneVM_Task, 'duration_secs': 1.397009} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.632771] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370113, 'name': ReconfigVM_Task, 'duration_secs': 1.034511} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.632925] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 903.633195] env[62923]: INFO nova.virt.vmwareapi.vmops [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Created linked-clone VM from snapshot [ 903.633465] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7/3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.634596] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.634762] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.635322] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 903.636291] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336ce33b-c1c9-4e55-a8fe-5e79b23d4af1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.638703] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e12dd43c-3b6a-47f5-a14d-c18d627d7c88 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.642497] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5486c476-a192-44e4-ae7e-cfaeeae5b409 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.654258] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Uploading image 82a06d7c-e957-4bd3-97f2-0322af9583a1 {{(pid=62923) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 903.656277] env[62923]: DEBUG oslo_vmware.api [None 
req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 903.656277] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521d5570-ffb7-6891-14cb-f7cd64c07a0d" [ 903.656277] env[62923]: _type = "Task" [ 903.656277] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.656550] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 903.656550] env[62923]: value = "task-1370115" [ 903.656550] env[62923]: _type = "Task" [ 903.656550] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.673960] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521d5570-ffb7-6891-14cb-f7cd64c07a0d, 'name': SearchDatastore_Task, 'duration_secs': 0.011395} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.680729] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.680985] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 903.681266] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.681419] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.681597] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 903.681867] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370115, 'name': 
Rename_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.682633] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7c1f851-e836-4c08-a89d-cd95f66f76a9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.690253] env[62923]: DEBUG oslo_vmware.rw_handles [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 903.690253] env[62923]: value = "vm-291486" [ 903.690253] env[62923]: _type = "VirtualMachine" [ 903.690253] env[62923]: }. {{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 903.690514] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-fa235440-8b01-4b88-bea4-6021a46069de {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.693052] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 903.693264] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 903.694270] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e224c8f6-27df-489c-9691-ce49ac3b705b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.701325] env[62923]: DEBUG oslo_vmware.rw_handles [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lease: (returnval){ [ 903.701325] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52aa9220-eb35-ab48-06ef-656cd4d66c33" [ 903.701325] env[62923]: _type = "HttpNfcLease" [ 903.701325] env[62923]: } obtained for exporting VM: (result){ [ 903.701325] env[62923]: value = "vm-291486" [ 903.701325] env[62923]: _type = "VirtualMachine" [ 903.701325] env[62923]: }. {{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 903.701603] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the lease: (returnval){ [ 903.701603] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52aa9220-eb35-ab48-06ef-656cd4d66c33" [ 903.701603] env[62923]: _type = "HttpNfcLease" [ 903.701603] env[62923]: } to be ready. 
{{(pid=62923) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 903.705546] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 903.705546] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5268978c-faeb-fd1a-ea1e-4151eb5b6db9" [ 903.705546] env[62923]: _type = "Task" [ 903.705546] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.710638] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 903.710638] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52aa9220-eb35-ab48-06ef-656cd4d66c33" [ 903.710638] env[62923]: _type = "HttpNfcLease" [ 903.710638] env[62923]: } is initializing. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 903.717800] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5268978c-faeb-fd1a-ea1e-4151eb5b6db9, 'name': SearchDatastore_Task, 'duration_secs': 0.008122} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.718598] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f800036a-5c5b-4430-8c2e-c9084364ec67 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.723211] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 903.723211] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]520336c9-f7c1-8eba-b9e3-4916ca7ffd24" [ 903.723211] env[62923]: _type = "Task" [ 903.723211] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.730687] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]520336c9-f7c1-8eba-b9e3-4916ca7ffd24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.828127] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56d9317-472b-41f8-ae4a-dfc4f555f5bc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.835735] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb39e20f-62d1-4f1a-a7ca-c94a44800642 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.865988] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d07d14-99f7-424f-b793-ec453a84d36d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.873252] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632ebdcf-d8f1-4aa3-be18-48fbf60c3d16 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.885928] env[62923]: DEBUG nova.compute.provider_tree [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.128741] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f99f32-4772-44a9-acc8-8365504ac307 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.149526] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating instance '066da19f-daf0-44e3-8ae0-89f0c970cb92' progress to 0 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 904.171038] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370115, 'name': Rename_Task, 'duration_secs': 0.149123} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.171038] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.171261] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9006bdfe-9624-4357-bf19-cb30e776dd5b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.178464] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 904.178464] env[62923]: value = "task-1370117" [ 904.178464] env[62923]: _type = "Task" [ 904.178464] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.186917] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370117, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.211040] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 904.211040] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52aa9220-eb35-ab48-06ef-656cd4d66c33" [ 904.211040] env[62923]: _type = "HttpNfcLease" [ 904.211040] env[62923]: } is ready. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 904.211431] env[62923]: DEBUG oslo_vmware.rw_handles [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 904.211431] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52aa9220-eb35-ab48-06ef-656cd4d66c33" [ 904.211431] env[62923]: _type = "HttpNfcLease" [ 904.211431] env[62923]: }. {{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 904.212231] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb6d4fd-f179-44d7-a516-c8e27728d83a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.220276] env[62923]: DEBUG oslo_vmware.rw_handles [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522926e4-7325-1573-f448-4a80d18045b6/disk-0.vmdk from lease info. {{(pid=62923) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 904.220573] env[62923]: DEBUG oslo_vmware.rw_handles [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522926e4-7325-1573-f448-4a80d18045b6/disk-0.vmdk for reading. 
{{(pid=62923) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 904.286729] env[62923]: DEBUG nova.network.neutron [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Successfully created port: 3adfc18d-e45f-4eb0-8019-d5531853f63f {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 904.295580] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]520336c9-f7c1-8eba-b9e3-4916ca7ffd24, 'name': SearchDatastore_Task, 'duration_secs': 0.008589} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.296214] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.296533] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 0a9fdd83-3818-4831-90f9-9d30713961c5/0a9fdd83-3818-4831-90f9-9d30713961c5.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 904.296844] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-674ddd79-3fd7-4377-b4d9-6e2993fe7968 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.303536] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 904.303536] env[62923]: value = "task-1370118" [ 904.303536] env[62923]: _type = "Task" [ 904.303536] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.311987] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370118, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.325522] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2ec73399-c93b-4fc7-82df-16901573cc23 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.388846] env[62923]: DEBUG nova.scheduler.client.report [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 904.629262] env[62923]: DEBUG nova.compute.manager [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 904.654493] env[62923]: DEBUG nova.virt.hardware [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 904.654789] env[62923]: DEBUG nova.virt.hardware [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 904.654955] env[62923]: DEBUG nova.virt.hardware [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 904.655184] env[62923]: DEBUG nova.virt.hardware [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 904.655377] env[62923]: DEBUG nova.virt.hardware 
[None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 904.655537] env[62923]: DEBUG nova.virt.hardware [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 904.655745] env[62923]: DEBUG nova.virt.hardware [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 904.655904] env[62923]: DEBUG nova.virt.hardware [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 904.656097] env[62923]: DEBUG nova.virt.hardware [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 904.656286] env[62923]: DEBUG nova.virt.hardware [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 904.656614] env[62923]: DEBUG nova.virt.hardware [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 904.658514] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 904.659145] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55de6719-c687-4654-b2b3-1eb38fec4f54 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.662373] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a42939ae-b516-43f0-8c54-740b64659759 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.670927] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15213b8-2eec-4f72-8749-3fe862147938 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.676036] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 904.676036] env[62923]: value = "task-1370119" [ 904.676036] env[62923]: _type = "Task" [ 904.676036] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.700684] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370119, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.704645] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370117, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.814331] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370118, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.894886] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.512s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.897716] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.257s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.899232] env[62923]: INFO nova.compute.claims [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 904.918357] env[62923]: INFO nova.scheduler.client.report [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Deleted allocations for instance 67a83e64-c8bd-499c-895a-11976d69195b [ 905.186732] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370119, 'name': PowerOffVM_Task, 'duration_secs': 0.226706} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.187027] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.187214] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating instance '066da19f-daf0-44e3-8ae0-89f0c970cb92' progress to 17 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 905.202446] env[62923]: DEBUG oslo_vmware.api [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370117, 'name': PowerOnVM_Task, 'duration_secs': 0.540545} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.205210] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 905.205210] env[62923]: INFO nova.compute.manager [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Took 9.53 seconds to spawn the instance on the hypervisor. [ 905.205210] env[62923]: DEBUG nova.compute.manager [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 905.205210] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76719061-5345-4c4f-be76-aace1628e5af {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.316035] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370118, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593888} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.316193] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 0a9fdd83-3818-4831-90f9-9d30713961c5/0a9fdd83-3818-4831-90f9-9d30713961c5.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 905.316544] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 905.317115] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f24d795-d061-4b81-aa9a-af2815436e69 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.325030] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 905.325030] env[62923]: value = "task-1370120" [ 905.325030] env[62923]: _type = "Task" [ 905.325030] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.332842] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370120, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.427163] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5e0830d-1ca0-4b47-8f27-223c0221be87 tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "67a83e64-c8bd-499c-895a-11976d69195b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 22.632s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.697879] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 905.698202] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 905.698434] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 905.698810] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 905.699029] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 905.699224] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 905.699467] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 905.699671] env[62923]: DEBUG nova.virt.hardware [None
req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 905.699842] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 905.700009] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 905.700187] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 905.705516] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-744e2251-6c4d-4622-bb52-a64b4a926b00 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.726509] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 905.726509] env[62923]: value = "task-1370121" [ 905.726509] env[62923]: _type = "Task" [ 905.726509] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.730047] env[62923]: INFO nova.compute.manager [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Took 24.53 seconds to build instance. [ 905.735828] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370121, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.773857] env[62923]: DEBUG oslo_concurrency.lockutils [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "94d2670f-d858-437a-a166-d148a57e07ab" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.774179] env[62923]: DEBUG oslo_concurrency.lockutils [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "94d2670f-d858-437a-a166-d148a57e07ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.774383] env[62923]: DEBUG oslo_concurrency.lockutils [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "94d2670f-d858-437a-a166-d148a57e07ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.774591] env[62923]: DEBUG oslo_concurrency.lockutils [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "94d2670f-d858-437a-a166-d148a57e07ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.774766] env[62923]: DEBUG oslo_concurrency.lockutils [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "94d2670f-d858-437a-a166-d148a57e07ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.777180] env[62923]: INFO nova.compute.manager [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Terminating instance [ 905.779676] env[62923]: DEBUG oslo_concurrency.lockutils [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "refresh_cache-94d2670f-d858-437a-a166-d148a57e07ab" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.779676] env[62923]: DEBUG oslo_concurrency.lockutils [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquired lock "refresh_cache-94d2670f-d858-437a-a166-d148a57e07ab" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.779676] env[62923]: DEBUG nova.network.neutron [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418
tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 905.788103] env[62923]: DEBUG nova.compute.manager [req-85883831-9522-4378-bc75-a30c1e941c4d req-06635ef8-a42d-46ca-91ca-cca482d436c4 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Received event network-vif-plugged-3adfc18d-e45f-4eb0-8019-d5531853f63f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 905.788351] env[62923]: DEBUG oslo_concurrency.lockutils [req-85883831-9522-4378-bc75-a30c1e941c4d req-06635ef8-a42d-46ca-91ca-cca482d436c4 service nova] Acquiring lock "f52f5912-d6e8-4da5-ac39-65bb065b6555-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.788647] env[62923]: DEBUG oslo_concurrency.lockutils [req-85883831-9522-4378-bc75-a30c1e941c4d req-06635ef8-a42d-46ca-91ca-cca482d436c4 service nova] Lock "f52f5912-d6e8-4da5-ac39-65bb065b6555-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.788785] env[62923]: DEBUG oslo_concurrency.lockutils [req-85883831-9522-4378-bc75-a30c1e941c4d req-06635ef8-a42d-46ca-91ca-cca482d436c4 service nova] Lock "f52f5912-d6e8-4da5-ac39-65bb065b6555-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.789028] env[62923]: DEBUG nova.compute.manager [req-85883831-9522-4378-bc75-a30c1e941c4d req-06635ef8-a42d-46ca-91ca-cca482d436c4 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] No waiting events found dispatching network-vif-plugged-3adfc18d-e45f-4eb0-8019-d5531853f63f {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 905.789170] env[62923]: WARNING nova.compute.manager [req-85883831-9522-4378-bc75-a30c1e941c4d req-06635ef8-a42d-46ca-91ca-cca482d436c4 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Received unexpected event network-vif-plugged-3adfc18d-e45f-4eb0-8019-d5531853f63f for instance with vm_state building and task_state spawning. [ 905.835040] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370120, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066633} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.835040] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 905.837036] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d3ba84-5147-4592-a679-3061df92fdbd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.859385] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 0a9fdd83-3818-4831-90f9-9d30713961c5/0a9fdd83-3818-4831-90f9-9d30713961c5.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 905.860495] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d19bff1b-2e26-4fd2-8990-6321dff996f8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.882276] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 905.882276] env[62923]: value = "task-1370122" [ 905.882276] env[62923]: _type = "Task" [ 905.882276] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.891189] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370122, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.892114] env[62923]: DEBUG nova.network.neutron [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Successfully updated port: 3adfc18d-e45f-4eb0-8019-d5531853f63f {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 906.123172] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8319da24-07ac-4cac-b2fe-3bfe59b1736a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.131346] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ba4bea-8312-4c38-b18b-35f22e81066c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.167823] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59893031-c82a-49ac-82a5-faaea9beb7a6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.177430] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4add03af-35f8-4645-8cbd-62d2f6dfee27 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.190545] env[62923]: DEBUG nova.compute.provider_tree [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 906.232730] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1ba40349-466c-4a1f-b46a-787d8505cf27 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 26.045s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.236898] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370121, 'name': ReconfigVM_Task, 'duration_secs': 0.258841} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.237035] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating instance '066da19f-daf0-44e3-8ae0-89f0c970cb92' progress to 33 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 906.301752] env[62923]: DEBUG nova.network.neutron [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 906.405825] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.405989] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.409168] env[62923]: DEBUG nova.network.neutron [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 906.410471] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370122, 'name': ReconfigVM_Task, 'duration_secs': 0.48339} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.411627] env[62923]: DEBUG nova.network.neutron [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.412908] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 0a9fdd83-3818-4831-90f9-9d30713961c5/0a9fdd83-3818-4831-90f9-9d30713961c5.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 906.413862] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-739e0906-c7b8-45a1-94f8-d0abfc931af6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.422227] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 906.422227] env[62923]: value = "task-1370123" [ 906.422227] env[62923]: _type = "Task" [ 906.422227] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.432313] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370123, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.716037] env[62923]: ERROR nova.scheduler.client.report [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [req-794ab695-9fa8-46d5-a6be-5a91f19ec442] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-794ab695-9fa8-46d5-a6be-5a91f19ec442"}]} [ 906.733443] env[62923]: DEBUG nova.scheduler.client.report [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 906.743666] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 906.743902] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 906.744095] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 906.744291] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 906.744445] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 906.744594] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 906.744792] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 906.744975] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 906.745184] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 906.745513] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 906.745840] env[62923]: DEBUG nova.virt.hardware [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 906.751512] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Reconfiguring VM instance instance-0000004b to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 906.752502] env[62923]: DEBUG nova.scheduler.client.report [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 906.752709] env[62923]: DEBUG nova.compute.provider_tree [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 906.754611] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1419f755-b5db-4b36-93d3-7fb3538ad153 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.773682] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 906.773682] env[62923]: value = "task-1370124" [ 906.773682] env[62923]: _type = "Task" [ 906.773682] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.778192] env[62923]: DEBUG nova.scheduler.client.report [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 906.783113] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370124, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.797642] env[62923]: DEBUG nova.scheduler.client.report [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 906.914844] env[62923]: DEBUG oslo_concurrency.lockutils [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Releasing lock "refresh_cache-94d2670f-d858-437a-a166-d148a57e07ab" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.915495] env[62923]: DEBUG nova.compute.manager [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 906.915749] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 906.918361] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625ddf55-ff78-4d2b-9f28-ca5f4bade297 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.929465] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 906.929978] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e80e41e0-de8e-4732-8fe1-233e5272bf59 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.936230] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370123, 'name': Rename_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.939869] env[62923]: DEBUG oslo_vmware.api [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 906.939869] env[62923]: value = "task-1370125" [ 906.939869] env[62923]: _type = "Task" [ 906.939869] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.947437] env[62923]: DEBUG oslo_vmware.api [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370125, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.974965] env[62923]: DEBUG nova.network.neutron [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 906.981498] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9005ee0a-6b00-42a9-8c6b-8b265a38e889 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.989307] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d7c1d4-fcce-4255-ba70-bfdf79738cd2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.023192] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c217a92e-7437-4d5e-9a93-52899a5a3205 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.030702] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47903f7c-13db-44f3-82b8-44dd7865900c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.043643] env[62923]: DEBUG nova.compute.provider_tree [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 907.241785] env[62923]: DEBUG nova.network.neutron [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updating instance_info_cache with network_info: [{"id": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "address": "fa:16:3e:bf:9d:f5", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3adfc18d-e4", "ovs_interfaceid": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.283778] env[62923]: DEBUG oslo_vmware.api [None 
req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370124, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.434931] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370123, 'name': Rename_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.451093] env[62923]: DEBUG oslo_vmware.api [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370125, 'name': PowerOffVM_Task, 'duration_secs': 0.507984} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.451449] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.451647] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.451932] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6443e69d-b917-4689-a02b-1366dbe3e8e6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.473833] env[62923]: DEBUG oslo_concurrency.lockutils [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.474094] env[62923]: DEBUG oslo_concurrency.lockutils [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.487120] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.487344] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Deleting contents of the VM from
datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.487528] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Deleting the datastore file [datastore2] 94d2670f-d858-437a-a166-d148a57e07ab {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.487828] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ee71179-9000-48f3-af18-38d84e6c2643 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.493950] env[62923]: DEBUG oslo_vmware.api [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for the task: (returnval){ [ 907.493950] env[62923]: value = "task-1370127" [ 907.493950] env[62923]: _type = "Task" [ 907.493950] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.503113] env[62923]: DEBUG oslo_vmware.api [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370127, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.577503] env[62923]: DEBUG nova.scheduler.client.report [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 100 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 907.577801] env[62923]: DEBUG nova.compute.provider_tree [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 100 to 101 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 907.578015] env[62923]: DEBUG nova.compute.provider_tree [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 907.745373] env[62923]: DEBUG 
oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.745716] env[62923]: DEBUG nova.compute.manager [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Instance network_info: |[{"id": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "address": "fa:16:3e:bf:9d:f5", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3adfc18d-e4", "ovs_interfaceid": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 907.746192] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:9d:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd7d0d95-6848-4e69-ac21-75f8db82a3b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3adfc18d-e45f-4eb0-8019-d5531853f63f', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 907.753774] env[62923]: DEBUG oslo.service.loopingcall [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 907.754020] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 907.754414] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7206b80c-39f8-4a8e-8476-82b6d679c7e7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.774032] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 907.774032] env[62923]: value = "task-1370128" [ 907.774032] env[62923]: _type = "Task" [ 907.774032] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.785886] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370128, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.788922] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370124, 'name': ReconfigVM_Task, 'duration_secs': 0.73508} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.789191] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Reconfigured VM instance instance-0000004b to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 907.789949] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69167785-5ac6-437e-bbab-0c4a1c002ec1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.812108] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 066da19f-daf0-44e3-8ae0-89f0c970cb92/066da19f-daf0-44e3-8ae0-89f0c970cb92.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 907.812407] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-073e1e3c-cfcb-4986-a0e9-f3caaf4f271e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.827986] env[62923]: DEBUG nova.compute.manager [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Received event network-changed-3adfc18d-e45f-4eb0-8019-d5531853f63f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 907.828252] env[62923]: DEBUG nova.compute.manager [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Refreshing instance 
network info cache due to event network-changed-3adfc18d-e45f-4eb0-8019-d5531853f63f. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 907.828478] env[62923]: DEBUG oslo_concurrency.lockutils [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] Acquiring lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.828626] env[62923]: DEBUG oslo_concurrency.lockutils [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] Acquired lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.828790] env[62923]: DEBUG nova.network.neutron [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Refreshing network info cache for port 3adfc18d-e45f-4eb0-8019-d5531853f63f {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 907.835071] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 907.835071] env[62923]: value = "task-1370129" [ 907.835071] env[62923]: _type = "Task" [ 907.835071] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.845117] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370129, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.935366] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370123, 'name': Rename_Task, 'duration_secs': 1.043869} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.936034] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 907.936144] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-733eb210-3ad1-4c34-94f0-0abfa8a463a9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.942909] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 907.942909] env[62923]: value = "task-1370130" [ 907.942909] env[62923]: _type = "Task" [ 907.942909] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.950682] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370130, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.977078] env[62923]: DEBUG nova.compute.utils [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 908.003770] env[62923]: DEBUG oslo_vmware.api [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Task: {'id': task-1370127, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109183} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.004081] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.004278] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.004461] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.004633] env[62923]: INFO nova.compute.manager [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Took 1.09 seconds to destroy the instance on the hypervisor. [ 908.005015] env[62923]: DEBUG oslo.service.loopingcall [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.005225] env[62923]: DEBUG nova.compute.manager [-] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 908.005350] env[62923]: DEBUG nova.network.neutron [-] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.020982] env[62923]: DEBUG nova.network.neutron [-] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 908.082836] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.185s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.083534] env[62923]: DEBUG nova.compute.manager [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 908.088487] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.190s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.088723] env[62923]: DEBUG nova.objects.instance [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Lazy-loading 'resources' on Instance uuid e6752138-5d66-469d-ac56-6bd169ad166e {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 908.285068] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370128, 'name': CreateVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.344948] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370129, 'name': ReconfigVM_Task, 'duration_secs': 0.415963} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.345251] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 066da19f-daf0-44e3-8ae0-89f0c970cb92/066da19f-daf0-44e3-8ae0-89f0c970cb92.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 908.345537] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating instance '066da19f-daf0-44e3-8ae0-89f0c970cb92' progress to 50 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 908.453032] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370130, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.480419] env[62923]: DEBUG oslo_concurrency.lockutils [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.006s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.523353] env[62923]: DEBUG nova.network.neutron [-] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.596649] env[62923]: DEBUG nova.compute.utils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 908.598966] env[62923]: DEBUG nova.network.neutron [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updated VIF entry in instance network info cache for port 3adfc18d-e45f-4eb0-8019-d5531853f63f. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 908.600077] env[62923]: DEBUG nova.network.neutron [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updating instance_info_cache with network_info: [{"id": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "address": "fa:16:3e:bf:9d:f5", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3adfc18d-e4", "ovs_interfaceid": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.600649] env[62923]: DEBUG nova.compute.manager [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 908.600819] env[62923]: DEBUG nova.network.neutron [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 908.649266] env[62923]: DEBUG nova.policy [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c654b8365f5543f3bf713f3f5aa00654', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a654d46357ed49cd95460a56926f102a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 908.788516] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370128, 'name': CreateVM_Task} progress is 15%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.804387] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a103e4-ffc6-4bf7-ab5e-e2f4c19033dc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.811475] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a805f7-cfe4-4ed9-b2c5-dbc505eb7bcd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.842573] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53c7b40-d26d-4706-b2f8-cd08214ac589 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.850021] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39af32e5-76d1-4bc0-b716-5118554b4e3a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.855811] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe24641-1fc5-40f5-b3b1-97d68ebb29ba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.868103] env[62923]: DEBUG nova.compute.provider_tree [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.885422] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca7ffb0-67f2-4657-a990-6c50546e9a31 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.903149] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 
tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating instance '066da19f-daf0-44e3-8ae0-89f0c970cb92' progress to 67 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 908.954912] env[62923]: DEBUG oslo_vmware.api [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370130, 'name': PowerOnVM_Task, 'duration_secs': 0.678118} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.955273] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 908.955555] env[62923]: INFO nova.compute.manager [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Took 9.13 seconds to spawn the instance on the hypervisor. [ 908.955749] env[62923]: DEBUG nova.compute.manager [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 908.956583] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a85950-edcb-49dc-885d-1d9b09c50c96 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.003474] env[62923]: DEBUG nova.network.neutron [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Successfully created port: 84ea0cf0-a773-40ce-946a-65371afd534b {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 909.025712] env[62923]: INFO nova.compute.manager [-] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Took 1.02 seconds to deallocate network for instance. [ 909.104319] env[62923]: DEBUG nova.compute.manager [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 909.108104] env[62923]: DEBUG oslo_concurrency.lockutils [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] Releasing lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.108104] env[62923]: DEBUG nova.compute.manager [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Received event network-changed-70dafc2e-d2a9-49fa-ac00-d46b002927bf {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 909.108104] env[62923]: DEBUG nova.compute.manager [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Refreshing instance network info cache due to event network-changed-70dafc2e-d2a9-49fa-ac00-d46b002927bf. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 909.108306] env[62923]: DEBUG oslo_concurrency.lockutils [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] Acquiring lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.108353] env[62923]: DEBUG oslo_concurrency.lockutils [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] Acquired lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.108527] env[62923]: DEBUG nova.network.neutron [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Refreshing network info cache for port 70dafc2e-d2a9-49fa-ac00-d46b002927bf {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 909.286134] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370128, 'name': CreateVM_Task} progress is 15%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.371014] env[62923]: DEBUG nova.scheduler.client.report [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 909.443256] env[62923]: DEBUG nova.network.neutron [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Port 0eb9a757-0625-4e00-a9b0-55888eb57e7b binding to destination host cpu-1 is already ACTIVE {{(pid=62923) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 909.475890] env[62923]: INFO nova.compute.manager [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Took 26.78 seconds to build instance. [ 909.533672] env[62923]: DEBUG oslo_concurrency.lockutils [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.559203] env[62923]: DEBUG oslo_concurrency.lockutils [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.559975] env[62923]: DEBUG oslo_concurrency.lockutils [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.559975] env[62923]: INFO nova.compute.manager [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Attaching volume 47f39439-20e2-4392-8f87-83a201ccb62d to /dev/sdb [ 909.597242] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4f359c-580f-4cb7-ab07-7f3e433e17fc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.605148] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-88ed64c4-f91a-4d22-9638-4f65cd2a63d1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.624377] env[62923]: DEBUG nova.virt.block_device [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Updating existing volume attachment record: 0e600141-d70c-4ad1-9a24-9f5dc3b28e79 {{(pid=62923) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 909.789395] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370128, 'name': CreateVM_Task} progress is 15%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.876768] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.788s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.878978] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 9.435s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.879189] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.879347] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62923) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 909.879647] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.509s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.881220] env[62923]: INFO nova.compute.claims [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.884738] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e28c714-05a9-4950-93d7-651253fd38d4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.893016] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385d5527-4aa4-44c6-ad85-5611d32cde20 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.910829] 
env[62923]: INFO nova.scheduler.client.report [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Deleted allocations for instance e6752138-5d66-469d-ac56-6bd169ad166e [ 909.915027] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b45278a9-75ae-48dc-a268-92771a4f08dd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.924257] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d231dfc-833c-4720-82a8-242193b91f44 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.966699] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181441MB free_disk=147GB free_vcpus=48 pci_devices=None {{(pid=62923) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 909.966893] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.976678] env[62923]: DEBUG oslo_concurrency.lockutils [None req-168ebedc-27d9-48f5-9628-3093c02bd441 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "0a9fdd83-3818-4831-90f9-9d30713961c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 28.297s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.040919] env[62923]: DEBUG nova.network.neutron [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Updated VIF entry in instance network info cache for port 70dafc2e-d2a9-49fa-ac00-d46b002927bf. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 910.040919] env[62923]: DEBUG nova.network.neutron [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Updating instance_info_cache with network_info: [{"id": "70dafc2e-d2a9-49fa-ac00-d46b002927bf", "address": "fa:16:3e:79:27:67", "network": {"id": "f9845a8d-f3e3-4080-8b11-bca02678b9c5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1137339094-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a855374ba4624ee78230d07b85b2ab8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70dafc2e-d2", "ovs_interfaceid": "70dafc2e-d2a9-49fa-ac00-d46b002927bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.129329] env[62923]: DEBUG nova.compute.manager [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 910.151349] env[62923]: DEBUG nova.virt.hardware [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 910.151532] env[62923]: DEBUG nova.virt.hardware [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 910.151712] env[62923]: DEBUG nova.virt.hardware [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 910.151832] env[62923]: DEBUG nova.virt.hardware [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 910.152049] env[62923]: DEBUG nova.virt.hardware [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 910.152147] env[62923]: DEBUG nova.virt.hardware [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 910.152346] env[62923]: DEBUG nova.virt.hardware [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 910.152511] env[62923]: DEBUG nova.virt.hardware [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 910.152792] env[62923]: DEBUG nova.virt.hardware [None 
req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 910.152891] env[62923]: DEBUG nova.virt.hardware [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 910.153090] env[62923]: DEBUG nova.virt.hardware [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 910.154010] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505963ca-b827-4844-8b67-fa3d034a2280 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.162700] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6daa221a-bf68-4ac1-8ea4-881f43d3f427 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.287876] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370128, 'name': CreateVM_Task} progress is 25%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.424388] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dac5b9ad-3113-47f4-9dd2-93fd1593319a tempest-ServerTagsTestJSON-102379845 tempest-ServerTagsTestJSON-102379845-project-member] Lock "e6752138-5d66-469d-ac56-6bd169ad166e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.489s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.494957] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "066da19f-daf0-44e3-8ae0-89f0c970cb92-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.494957] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "066da19f-daf0-44e3-8ae0-89f0c970cb92-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.494957] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "066da19f-daf0-44e3-8ae0-89f0c970cb92-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.544273] env[62923]: DEBUG oslo_concurrency.lockutils [req-35774531-0d1e-4349-936c-7ee86bddb99a req-db28b05d-e637-416a-bb07-132fa27eb705 service nova] Releasing lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.569698] env[62923]: DEBUG nova.compute.manager [req-96075b9a-2765-41ab-85c6-aec55d8545f5 req-b9faaea1-535f-4591-9c30-030446c932e7 service nova] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Received event network-vif-plugged-84ea0cf0-a773-40ce-946a-65371afd534b {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 910.569954] env[62923]: DEBUG oslo_concurrency.lockutils [req-96075b9a-2765-41ab-85c6-aec55d8545f5 req-b9faaea1-535f-4591-9c30-030446c932e7 service nova] Acquiring lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.570185] env[62923]: DEBUG oslo_concurrency.lockutils [req-96075b9a-2765-41ab-85c6-aec55d8545f5 req-b9faaea1-535f-4591-9c30-030446c932e7 service nova] Lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.570357] env[62923]: DEBUG oslo_concurrency.lockutils [req-96075b9a-2765-41ab-85c6-aec55d8545f5 req-b9faaea1-535f-4591-9c30-030446c932e7 service nova] Lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.570529] env[62923]: DEBUG nova.compute.manager [req-96075b9a-2765-41ab-85c6-aec55d8545f5 req-b9faaea1-535f-4591-9c30-030446c932e7 service nova] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] No waiting events found dispatching network-vif-plugged-84ea0cf0-a773-40ce-946a-65371afd534b {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 910.570696] env[62923]: WARNING nova.compute.manager [req-96075b9a-2765-41ab-85c6-aec55d8545f5 req-b9faaea1-535f-4591-9c30-030446c932e7 service nova] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Received unexpected event network-vif-plugged-84ea0cf0-a773-40ce-946a-65371afd534b for instance with vm_state building and task_state spawning. [ 910.701865] env[62923]: DEBUG nova.network.neutron [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Successfully updated port: 84ea0cf0-a773-40ce-946a-65371afd534b {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 910.790272] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370128, 'name': CreateVM_Task, 'duration_secs': 2.79661} completed successfully. 
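Annotation: every lockutils entry above follows the same acquire/wait/hold bookkeeping: a named lock ("...-events", "refresh_cache-...") is acquired, the time spent waiting is logged, and on release the hold time is logged (compare the 15.489s hold for do_terminate_instance with the 0.000s event-lock holds). A simplified stdlib analogue of that pattern, assuming nothing about oslo.concurrency's internals beyond what the log shows:

import threading
import time
from contextlib import contextmanager

_locks = {}
_guard = threading.Lock()  # protects the name -> Lock table itself

@contextmanager
def named_lock(name, by):
    with _guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{by}" :: waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{by}" :: held {time.monotonic() - t1:.3f}s')

with named_lock("3f60e93d-15ae-4fe4-ba86-6b6b123b645c-events", "_pop_event"):
    pass  # critical section: pop or clear the instance's waiting events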
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.790466] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 910.791682] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.791830] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.792177] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 910.792439] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1f04ac2-a6c5-4db1-8815-f21f4e478a49 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.797652] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 910.797652] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a218ec-75fd-fc8f-702d-6bc7a74ec087" [ 910.797652] env[62923]: _type = "Task" [ 910.797652] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.806095] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a218ec-75fd-fc8f-702d-6bc7a74ec087, 'name': SearchDatastore_Task} progress is 0%. 
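Annotation: the SearchDatastore_Task lines above show the oslo.vmware client pattern: the vCenter call returns a task handle immediately, and the caller polls the task's state, logging "progress is N%" each round, until it reaches success or error. A generic, stdlib-only sketch of that loop (the poll_fn shape is assumed for illustration; the real implementation lives in oslo_vmware/api.py per the paths in the log):

import time

def wait_for_task(poll_fn, interval=0.5, timeout=60.0):
    # poll_fn() is assumed to return {"state": ..., "progress": ..., "result": ...}.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_fn()
        print(f"Task progress is {info.get('progress', 0)}%.")
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("message", "task failed"))
        time.sleep(interval)
    raise TimeoutError("task did not complete before the deadline")

states = iter([{"state": "running", "progress": 0},
               {"state": "success", "progress": 100, "result": "done"}])
print(wait_for_task(lambda: next(states), interval=0.01))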
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.086836] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46443ec1-c1a1-438a-9393-3e3909bf58ab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.095568] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3b5c1b-2281-410f-9547-3ae81c5e0e99 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.132080] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243ad9b9-239e-4e69-8728-dc0d92fcc184 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.139042] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "906470fc-5fec-4c98-8a38-337361e12bc5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.139042] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "906470fc-5fec-4c98-8a38-337361e12bc5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.146021] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4329d45d-d373-4c63-acd1-31af73ff4bc6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.161061] env[62923]: DEBUG nova.compute.provider_tree [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.208977] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "refresh_cache-3f60e93d-15ae-4fe4-ba86-6b6b123b645c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.209196] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "refresh_cache-3f60e93d-15ae-4fe4-ba86-6b6b123b645c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.209391] env[62923]: DEBUG nova.network.neutron [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Building network info cache for instance {{(pid=62923) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 911.309734] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a218ec-75fd-fc8f-702d-6bc7a74ec087, 'name': SearchDatastore_Task, 'duration_secs': 0.010097} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.310194] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.310244] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 911.310453] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.310602] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.310778] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 911.311065] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8dc8142-d5ae-4d02-aa2c-ce92e9860f3e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.320396] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 911.320586] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Folder [datastore1] devstack-image-cache_base created. 
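Annotation: the sequence above, acquire the per-image lock, search the datastore for the cached VMDK, create devstack-image-cache_base if it is missing, is the fetch-if-missing cache idiom: spawns of the same image serialize on the image id, the first one downloads, and the rest reuse the cached file. A local-filesystem analogue, purely illustrative (the directory name mirrors the log; the download callable is a stand-in):

import threading
from pathlib import Path

_image_locks: dict[str, threading.Lock] = {}
_guard = threading.Lock()

def fetch_image_if_missing(cache_root: Path, image_id: str, download) -> Path:
    # One lock per image id, like the "[datastore1] devstack-image-cache_base/<id>" lock above.
    with _guard:
        lock = _image_locks.setdefault(image_id, threading.Lock())
    with lock:
        target = cache_root / image_id / f"{image_id}.vmdk"
        if not target.exists():                               # the SearchDatastore_Task step
            target.parent.mkdir(parents=True, exist_ok=True)  # the MakeDirectory step
            download(target)                                  # only the first caller pays this cost
        return target

path = fetch_image_if_missing(Path("/tmp/devstack-image-cache_base"),
                              "cd84cf13-77b9-4bc1-bb15-31bece605a8e",
                              lambda t: t.write_bytes(b"placeholder vmdk"))
print(path)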
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 911.321423] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d91924f-981c-4cb9-be89-85166261710f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.327051] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 911.327051] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cb7abd-de2e-c20b-e08f-d6b4b2ee5754" [ 911.327051] env[62923]: _type = "Task" [ 911.327051] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.335242] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cb7abd-de2e-c20b-e08f-d6b4b2ee5754, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.557238] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.557457] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.557648] env[62923]: DEBUG nova.network.neutron [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 911.646547] env[62923]: DEBUG nova.compute.manager [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 911.667222] env[62923]: DEBUG nova.scheduler.client.report [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 911.785662] env[62923]: DEBUG nova.network.neutron [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 911.838080] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cb7abd-de2e-c20b-e08f-d6b4b2ee5754, 'name': SearchDatastore_Task, 'duration_secs': 0.009348} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.839120] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cee1f3e6-b62b-4226-901b-fe41ebc8e3ec {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.845988] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 911.845988] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5299c3e5-a9eb-6fc3-f15f-2b256e86f346" [ 911.845988] env[62923]: _type = "Task" [ 911.845988] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.860973] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5299c3e5-a9eb-6fc3-f15f-2b256e86f346, 'name': SearchDatastore_Task} progress is 0%. 
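Annotation: the inventory record above fixes the schedulable capacity of the provider: for each resource class, usable capacity is (total - reserved) * allocation_ratio, handed out in multiples of step_size between min_unit and max_unit. Working that through for the logged values:

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")
# VCPU: 192 (48 vCPUs oversubscribed 4x), MEMORY_MB: 196078, DISK_GB: 400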
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.135341] env[62923]: DEBUG nova.network.neutron [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Updating instance_info_cache with network_info: [{"id": "84ea0cf0-a773-40ce-946a-65371afd534b", "address": "fa:16:3e:7e:d7:a9", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84ea0cf0-a7", "ovs_interfaceid": "84ea0cf0-a773-40ce-946a-65371afd534b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.172635] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.293s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.173109] env[62923]: DEBUG nova.compute.manager [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 912.180513] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.180828] env[62923]: DEBUG oslo_concurrency.lockutils [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.647s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.181066] env[62923]: DEBUG nova.objects.instance [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lazy-loading 'resources' on Instance uuid 94d2670f-d858-437a-a166-d148a57e07ab {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 912.357097] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5299c3e5-a9eb-6fc3-f15f-2b256e86f346, 'name': SearchDatastore_Task, 'duration_secs': 0.014077} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.357446] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.357757] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] f52f5912-d6e8-4da5-ac39-65bb065b6555/f52f5912-d6e8-4da5-ac39-65bb065b6555.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 912.358074] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26206327-5244-41fe-8590-818199300a72 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.365382] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 912.365382] env[62923]: value = "task-1370135" [ 912.365382] env[62923]: _type = "Task" [ 912.365382] env[62923]: } to complete. 
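Annotation: the CopyVirtualDisk_Task above copies the cached image VMDK into the instance's own datastore folder, and both paths follow the fixed naming scheme visible in the log. A small helper reproducing that scheme (illustrative; the cache directory name is this deployment's configured image-cache folder, and Nova builds these paths through its ds_util helpers):

def cached_image_path(datastore: str, image_id: str) -> str:
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

def instance_disk_path(datastore: str, instance_uuid: str) -> str:
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

print(cached_image_path("datastore1", "cd84cf13-77b9-4bc1-bb15-31bece605a8e"))   # copy source
print(instance_disk_path("datastore1", "f52f5912-d6e8-4da5-ac39-65bb065b6555"))  # copy destination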
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.371242] env[62923]: DEBUG nova.network.neutron [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating instance_info_cache with network_info: [{"id": "0eb9a757-0625-4e00-a9b0-55888eb57e7b", "address": "fa:16:3e:57:cc:02", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb9a757-06", "ovs_interfaceid": "0eb9a757-0625-4e00-a9b0-55888eb57e7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.375937] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370135, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.604335] env[62923]: DEBUG nova.compute.manager [req-2d303083-349b-44ef-9b90-462ce01a9928 req-125c4fd5-573c-4c77-9ccc-859f824f1682 service nova] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Received event network-changed-84ea0cf0-a773-40ce-946a-65371afd534b {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 912.604557] env[62923]: DEBUG nova.compute.manager [req-2d303083-349b-44ef-9b90-462ce01a9928 req-125c4fd5-573c-4c77-9ccc-859f824f1682 service nova] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Refreshing instance network info cache due to event network-changed-84ea0cf0-a773-40ce-946a-65371afd534b. 
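Annotation: the network-changed handling above is event-driven cache invalidation: Neutron notifies Nova about a specific port, and the handler refreshes only the affected instance's cached network info, under that instance's refresh_cache lock. A stripped-down sketch of the dispatch, with the event-name parsing inferred from the logged event string and everything else illustrative:

nw_cache: dict[str, list] = {}  # instance_uuid -> cached network_info

def fetch_nw_info(instance_uuid: str) -> list:
    # Stand-in for the Neutron query that rebuilds the network info model.
    return [{"id": "84ea0cf0-a773-40ce-946a-65371afd534b"}]

def on_external_event(instance_uuid: str, event: str) -> None:
    if event.startswith("network-changed-"):
        port_id = event.removeprefix("network-changed-")
        print(f"Refreshing network info cache due to event network-changed-{port_id}")
        nw_cache[instance_uuid] = fetch_nw_info(instance_uuid)

on_external_event("3f60e93d-15ae-4fe4-ba86-6b6b123b645c",
                  "network-changed-84ea0cf0-a773-40ce-946a-65371afd534b")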
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 912.604749] env[62923]: DEBUG oslo_concurrency.lockutils [req-2d303083-349b-44ef-9b90-462ce01a9928 req-125c4fd5-573c-4c77-9ccc-859f824f1682 service nova] Acquiring lock "refresh_cache-3f60e93d-15ae-4fe4-ba86-6b6b123b645c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.638452] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "refresh_cache-3f60e93d-15ae-4fe4-ba86-6b6b123b645c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.638617] env[62923]: DEBUG nova.compute.manager [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Instance network_info: |[{"id": "84ea0cf0-a773-40ce-946a-65371afd534b", "address": "fa:16:3e:7e:d7:a9", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84ea0cf0-a7", "ovs_interfaceid": "84ea0cf0-a773-40ce-946a-65371afd534b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 912.639066] env[62923]: DEBUG oslo_concurrency.lockutils [req-2d303083-349b-44ef-9b90-462ce01a9928 req-125c4fd5-573c-4c77-9ccc-859f824f1682 service nova] Acquired lock "refresh_cache-3f60e93d-15ae-4fe4-ba86-6b6b123b645c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.639305] env[62923]: DEBUG nova.network.neutron [req-2d303083-349b-44ef-9b90-462ce01a9928 req-125c4fd5-573c-4c77-9ccc-859f824f1682 service nova] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Refreshing network info cache for port 84ea0cf0-a773-40ce-946a-65371afd534b {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 912.641070] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:d7:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '365ac5b1-6d83-4dfe-887f-60574d7f6124', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'84ea0cf0-a773-40ce-946a-65371afd534b', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 912.653996] env[62923]: DEBUG oslo.service.loopingcall [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.657821] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 912.658989] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-981b1558-26bc-44aa-a754-d1b29fc4898f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.691024] env[62923]: DEBUG nova.compute.utils [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 912.695393] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 912.695393] env[62923]: value = "task-1370136" [ 912.695393] env[62923]: _type = "Task" [ 912.695393] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.698285] env[62923]: DEBUG nova.compute.manager [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 912.700216] env[62923]: DEBUG nova.network.neutron [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 912.717941] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370136, 'name': CreateVM_Task} progress is 15%. 
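Annotation: the "Instance VIF info" entry above shows the translation performed per port: the Neutron port becomes a dict carrying the integration bridge as network_name, the port's MAC, an OpaqueNetwork reference keyed by the NSX logical switch id, the port id as iface_id, and the vmxnet3 model. A sketch deriving exactly that shape from a network_info entry, using only fields present in the log:

def vif_info_from_network_info(vif: dict) -> dict:
    return {
        "network_name": vif["network"]["bridge"],  # 'br-int'
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",  # the image's declared NIC model, per the image name above
    }

vif = {"id": "84ea0cf0-a773-40ce-946a-65371afd534b",
       "address": "fa:16:3e:7e:d7:a9",
       "network": {"bridge": "br-int"},
       "details": {"nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124"}}
print(vif_info_from_network_info(vif))  # matches the logged VIF info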
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.788042] env[62923]: DEBUG nova.policy [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef8830870c4c436eabea2c35bdf1721c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cbec40cae345480d80d07ad9abaa1297', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 912.883209] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.903647] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370135, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.987324] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3fa61c-731c-451a-bc7b-a65833475c45 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.996499] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24be11b-228a-4ea3-a7e7-f87d6dc15838 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.032209] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd3f4a1-cb72-43a6-b0d1-a5e5310823a8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.043130] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffafc28-fa6e-426c-b27f-abc315fc7a7d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.060798] env[62923]: DEBUG nova.compute.provider_tree [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.146349] env[62923]: DEBUG nova.network.neutron [req-2d303083-349b-44ef-9b90-462ce01a9928 req-125c4fd5-573c-4c77-9ccc-859f824f1682 service nova] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Updated VIF entry in instance network info cache for port 84ea0cf0-a773-40ce-946a-65371afd534b. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 913.146861] env[62923]: DEBUG nova.network.neutron [req-2d303083-349b-44ef-9b90-462ce01a9928 req-125c4fd5-573c-4c77-9ccc-859f824f1682 service nova] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Updating instance_info_cache with network_info: [{"id": "84ea0cf0-a773-40ce-946a-65371afd534b", "address": "fa:16:3e:7e:d7:a9", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84ea0cf0-a7", "ovs_interfaceid": "84ea0cf0-a773-40ce-946a-65371afd534b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.199518] env[62923]: DEBUG nova.compute.manager [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 913.214524] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370136, 'name': CreateVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.380760] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370135, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54241} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.381239] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] f52f5912-d6e8-4da5-ac39-65bb065b6555/f52f5912-d6e8-4da5-ac39-65bb065b6555.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 913.381717] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 913.382257] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb525b06-bc03-442d-a11b-17e44a65ca48 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.389147] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 913.389147] env[62923]: value = "task-1370137" [ 913.389147] env[62923]: _type = "Task" [ 913.389147] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.403085] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370137, 'name': ExtendVirtualDisk_Task} progress is 0%. 
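Annotation: "Extending root virtual disk to 1048576" above appears to be the flavor's 1 GiB root disk expressed in KiB, the unit the extend call works in here: the flavor dump earlier shows root_gb=1, and 1 * 1024 * 1024 = 1048576. A one-line check (the unit conversion is inferred from those two logged values):

root_gb = 1  # m1.nano, per the flavor dump earlier in the log
print(root_gb * 1024 * 1024)  # 1048576, matching the logged extend target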
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.417409] env[62923]: DEBUG nova.network.neutron [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Successfully created port: 9d495caf-4038-4207-8c80-1309086eddfc {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 913.422010] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c615c0-e694-4721-9b9b-f12ac796c594 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.443120] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4ae779-24e4-4888-a9c0-e7b68259916a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.450357] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating instance '066da19f-daf0-44e3-8ae0-89f0c970cb92' progress to 83 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 913.564758] env[62923]: DEBUG nova.scheduler.client.report [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 913.650235] env[62923]: DEBUG oslo_concurrency.lockutils [req-2d303083-349b-44ef-9b90-462ce01a9928 req-125c4fd5-573c-4c77-9ccc-859f824f1682 service nova] Releasing lock "refresh_cache-3f60e93d-15ae-4fe4-ba86-6b6b123b645c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.705136] env[62923]: INFO nova.virt.block_device [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Booting with volume 25dfd9ac-5161-4012-80a4-4fb573e0e4ca at /dev/sda [ 913.719529] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370136, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.739501] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a447d727-e0ca-4b57-ade1-804ab6a4af5e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.748236] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32969964-94a5-4c6b-8fb0-8f5b47f055ba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.778233] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79f79996-c963-4f1a-a2e3-5a29b89c89ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.787024] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd9149a-ff7e-4693-b7a9-41dce79218ca {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.818639] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8cdad8-0131-4691-a76b-183ff84ef01f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.824966] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9714ec-37fb-4675-adf2-464368873553 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.829128] env[62923]: DEBUG oslo_vmware.rw_handles [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522926e4-7325-1573-f448-4a80d18045b6/disk-0.vmdk. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 913.829901] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399347b0-e751-4ef0-8e45-9f4829e36c21 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.836012] env[62923]: DEBUG oslo_vmware.rw_handles [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522926e4-7325-1573-f448-4a80d18045b6/disk-0.vmdk is in state: ready. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 913.836185] env[62923]: ERROR oslo_vmware.rw_handles [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522926e4-7325-1573-f448-4a80d18045b6/disk-0.vmdk due to incomplete transfer. 
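Annotation: the ERROR above is the defensive branch of NFC lease teardown: after the VMDK read, the handle checks the lease state, and since the lease is still "ready" but the transfer did not finish, it aborts the lease (HttpNfcLeaseAbort) so vCenter releases the export instead of waiting for a completion that will never come. The decision reduces to a small state check, sketched with illustrative names:

def release_lease(lease_state: str, transfer_complete: bool) -> str:
    if lease_state != "ready":
        return "nothing to release"      # lease already expired or errored
    if transfer_complete:
        return "HttpNfcLeaseComplete"    # normal completion path
    return "HttpNfcLeaseAbort"           # the branch taken in the log above

print(release_lease("ready", transfer_complete=False))  # HttpNfcLeaseAbort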
[ 913.836418] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-17eae7b3-1768-4234-9e4b-33a77e82655c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.842290] env[62923]: DEBUG nova.virt.block_device [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Updating existing volume attachment record: bebc0bbd-bbed-4db3-b716-ffddbe793203 {{(pid=62923) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 913.845155] env[62923]: DEBUG oslo_vmware.rw_handles [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/522926e4-7325-1573-f448-4a80d18045b6/disk-0.vmdk. {{(pid=62923) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 913.845388] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Uploaded image 82a06d7c-e957-4bd3-97f2-0322af9583a1 to the Glance image server {{(pid=62923) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 913.847552] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Destroying the VM {{(pid=62923) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 913.848093] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a14120ef-9050-429c-a058-ec2ff3398796 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.853033] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 913.853033] env[62923]: value = "task-1370138" [ 913.853033] env[62923]: _type = "Task" [ 913.853033] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.860982] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370138, 'name': Destroy_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.898382] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370137, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063884} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.898663] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 913.899471] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2da601-d0cc-4c9f-a511-83f834db6bdb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.923420] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] f52f5912-d6e8-4da5-ac39-65bb065b6555/f52f5912-d6e8-4da5-ac39-65bb065b6555.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.923729] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d839b63b-f210-4056-a1a5-a4ea4f4e6f5e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.944050] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 913.944050] env[62923]: value = "task-1370139" [ 913.944050] env[62923]: _type = "Task" [ 913.944050] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.951973] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370139, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.957030] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 913.957126] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b761629-c6c7-43ec-96dc-92001fbb34dc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.962637] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 913.962637] env[62923]: value = "task-1370140" [ 913.962637] env[62923]: _type = "Task" [ 913.962637] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.970647] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370140, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.074888] env[62923]: DEBUG oslo_concurrency.lockutils [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.894s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.077376] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.110s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.111236] env[62923]: INFO nova.scheduler.client.report [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Deleted allocations for instance 94d2670f-d858-437a-a166-d148a57e07ab [ 914.180133] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Volume attach. 
Driver type: vmdk {{(pid=62923) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 914.180407] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291490', 'volume_id': '47f39439-20e2-4392-8f87-83a201ccb62d', 'name': 'volume-47f39439-20e2-4392-8f87-83a201ccb62d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '43065826-0f2b-48dc-bc42-8e0fd84fdcd3', 'attached_at': '', 'detached_at': '', 'volume_id': '47f39439-20e2-4392-8f87-83a201ccb62d', 'serial': '47f39439-20e2-4392-8f87-83a201ccb62d'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 914.181485] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28764b1-ba3d-45ae-8b97-d6b9cabc61ac {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.202778] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117b1b81-fae4-47e7-bf5d-495114467ccf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.232962] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] volume-47f39439-20e2-4392-8f87-83a201ccb62d/volume-47f39439-20e2-4392-8f87-83a201ccb62d.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 914.234220] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da445ba5-2036-4d44-b14c-9940777a11bf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.252906] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370136, 'name': CreateVM_Task, 'duration_secs': 1.471968} completed successfully. 
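The connection_info blob in the _attach_volume_vmdk entry above carries everything the attach path needs: data['volume'] is the managed-object id of the Cinder shadow VM that owns the disk ('vm-291490'), data['volume_id'] is the volume UUID, and access_mode/encrypted gate how it may be attached. A small validation sketch of pulling those fields apart; the real code then reads the vmdk path off the shadow VM:

    def parse_vmdk_connection_info(connection_info):
        if connection_info['driver_volume_type'] != 'vmdk':
            raise ValueError('not a vmdk connection')
        data = connection_info['data']
        if data.get('access_mode', 'rw') != 'rw':
            raise ValueError('read-only attach not covered by this sketch')
        # 'volume' is the shadow VM moref value, 'volume_id' the Cinder UUID.
        return data['volume'], data['volume_id']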
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.253817] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 914.254419] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.254660] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.255047] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 914.255333] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fece308-61a9-48ac-8663-516d668ea826 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.262176] env[62923]: DEBUG oslo_vmware.api [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 914.262176] env[62923]: value = "task-1370141" [ 914.262176] env[62923]: _type = "Task" [ 914.262176] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.262176] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 914.262176] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d81583-55db-57fa-6f36-6a2b91360ec2" [ 914.262176] env[62923]: _type = "Task" [ 914.262176] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.271643] env[62923]: DEBUG oslo_vmware.api [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370141, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.274933] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d81583-55db-57fa-6f36-6a2b91360ec2, 'name': SearchDatastore_Task} progress is 0%. 
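Two lock shapes appear throughout these entries, both from oslo.concurrency: the decorator form (logged via "inner", with waited/held durations, e.g. compute_resources waited 4.110s and held 1.894s above) and the context-manager form (logged via "lock", e.g. the per-image devstack-image-cache_base locks). A minimal sketch of each:

    from oslo_concurrency import lockutils

    # Decorator form: serialises the resource tracker's critical
    # sections; the waited/held timings in the log come from this wrapper.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # mutate tracked resources here

    # Context-manager form: one lock per cached image path.
    def check_image_cache(image_id):
        with lockutils.lock('[datastore1] devstack-image-cache_base/'
                            + image_id):
            pass  # probe/copy the cached vmdk here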
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.363248] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370138, 'name': Destroy_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.454688] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370139, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.473461] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370140, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.622395] env[62923]: DEBUG oslo_concurrency.lockutils [None req-46eb6e34-b364-4d9a-b21a-297f2238744f tempest-ServerShowV247Test-1961463418 tempest-ServerShowV247Test-1961463418-project-member] Lock "94d2670f-d858-437a-a166-d148a57e07ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.848s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.782097] env[62923]: DEBUG oslo_vmware.api [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370141, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.788756] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d81583-55db-57fa-6f36-6a2b91360ec2, 'name': SearchDatastore_Task, 'duration_secs': 0.024234} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.789136] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.789495] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 914.790060] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.790139] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.790359] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 914.790730] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7dd3beef-a218-4b8b-a47a-c26e26fc2a28 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.801758] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 914.802938] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Folder [datastore1] devstack-image-cache_base created. 
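The MakeDirectory call above creates the image-cache directory through the datastore's FileManager; with createParentDirectories it is effectively idempotent setup for the copy that follows. A sketch of the call; dc_ref stands in for a datacenter moref this excerpt does not show:

    from oslo_vmware import exceptions as vexc

    def mkdir(session, ds_path, dc_ref):
        file_manager = session.vim.service_content.fileManager
        try:
            session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                               name=ds_path, datacenter=dc_ref,
                               createParentDirectories=True)
        except vexc.FileAlreadyExistsException:
            # The cache directory is shared between builds; losing the
            # creation race is harmless.
            pass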
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 914.803649] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2231d8b-7121-40ab-b0fe-f5d1a1318e1b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.810959] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 914.810959] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c97092-8a0b-27ae-16f0-6077bc5b9bec" [ 914.810959] env[62923]: _type = "Task" [ 914.810959] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.822495] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c97092-8a0b-27ae-16f0-6077bc5b9bec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.865130] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370138, 'name': Destroy_Task, 'duration_secs': 0.522156} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.865524] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Destroyed the VM [ 914.865876] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Deleting Snapshot of the VM instance {{(pid=62923) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 914.866153] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e6385ff4-51a3-423e-a3e7-9f10fbd45c55 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.873518] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 914.873518] env[62923]: value = "task-1370142" [ 914.873518] env[62923]: _type = "Task" [ 914.873518] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.885820] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370142, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.955691] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370139, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.976764] env[62923]: DEBUG oslo_vmware.api [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370140, 'name': PowerOnVM_Task, 'duration_secs': 0.777952} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.977098] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 914.977391] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9f3707-aa59-485d-b9fc-511587c9b739 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating instance '066da19f-daf0-44e3-8ae0-89f0c970cb92' progress to 100 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 915.000226] env[62923]: DEBUG nova.compute.manager [req-2f12afb9-5696-40e0-97c7-83e53dbb12ee req-a5b55451-fd9f-477e-b80c-4f686ec6d525 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Received event network-vif-plugged-9d495caf-4038-4207-8c80-1309086eddfc {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 915.000469] env[62923]: DEBUG oslo_concurrency.lockutils [req-2f12afb9-5696-40e0-97c7-83e53dbb12ee req-a5b55451-fd9f-477e-b80c-4f686ec6d525 service nova] Acquiring lock "65000ac5-1c28-4abe-bc96-c440f0b14d3d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.001023] env[62923]: DEBUG oslo_concurrency.lockutils [req-2f12afb9-5696-40e0-97c7-83e53dbb12ee req-a5b55451-fd9f-477e-b80c-4f686ec6d525 service nova] Lock "65000ac5-1c28-4abe-bc96-c440f0b14d3d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.001023] env[62923]: DEBUG oslo_concurrency.lockutils [req-2f12afb9-5696-40e0-97c7-83e53dbb12ee req-a5b55451-fd9f-477e-b80c-4f686ec6d525 service nova] Lock "65000ac5-1c28-4abe-bc96-c440f0b14d3d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.001023] env[62923]: DEBUG nova.compute.manager [req-2f12afb9-5696-40e0-97c7-83e53dbb12ee req-a5b55451-fd9f-477e-b80c-4f686ec6d525 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] No waiting events found dispatching network-vif-plugged-9d495caf-4038-4207-8c80-1309086eddfc {{(pid=62923) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 915.001185] env[62923]: WARNING nova.compute.manager [req-2f12afb9-5696-40e0-97c7-83e53dbb12ee req-a5b55451-fd9f-477e-b80c-4f686ec6d525 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Received unexpected event network-vif-plugged-9d495caf-4038-4207-8c80-1309086eddfc for instance with vm_state building and task_state block_device_mapping. [ 915.089304] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Applying migration context for instance 066da19f-daf0-44e3-8ae0-89f0c970cb92 as it has an incoming, in-progress migration 1e89a30e-5033-486f-a50a-1bde86bc2874. Migration status is post-migrating {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 915.091776] env[62923]: INFO nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating resource usage from migration 1e89a30e-5033-486f-a50a-1bde86bc2874 [ 915.117242] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 915.117242] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 43065826-0f2b-48dc-bc42-8e0fd84fdcd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 915.117242] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 915.117242] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 534fa654-ed73-4518-bdc7-d1f981628fd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 915.117551] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 60805eeb-8287-4064-9bd3-a7c6a21f40b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 915.117551] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 915.117551] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 0a9fdd83-3818-4831-90f9-9d30713961c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 915.117659] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Migration 1e89a30e-5033-486f-a50a-1bde86bc2874 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 915.118106] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 066da19f-daf0-44e3-8ae0-89f0c970cb92 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 915.118218] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance f52f5912-d6e8-4da5-ac39-65bb065b6555 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 915.118256] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 3f60e93d-15ae-4fe4-ba86-6b6b123b645c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 915.118373] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 65000ac5-1c28-4abe-bc96-c440f0b14d3d actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 915.125246] env[62923]: DEBUG nova.network.neutron [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Successfully updated port: 9d495caf-4038-4207-8c80-1309086eddfc {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 915.271950] env[62923]: DEBUG oslo_vmware.api [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370141, 'name': ReconfigVM_Task, 'duration_secs': 0.712345} completed successfully. 
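Summed, the allocations the tracker just walked reproduce the "Final resource view" reported shortly after: eleven allocations at 192 MB plus 066da19f at 256 MB, plus the 512 MB host reservation from this node's inventory, gives used_ram=2880MB; twelve VCPU:1 allocations give used_vcpus=12; and boot-from-volume instance 65000ac5 carries no DISK_GB, leaving used_disk=11GB. As a check:

    mem_allocs = [192] * 11 + [256]      # MEMORY_MB per allocation above
    reserved_host_memory_mb = 512        # from this node's inventory
    used_ram = sum(mem_allocs) + reserved_host_memory_mb   # -> 2880
    used_vcpus = 12 * 1                  # twelve allocations of VCPU: 1
    used_disk_gb = 11 * 1                # 65000ac5 has no DISK_GB -> 11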
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.272318] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Reconfigured VM instance instance-00000049 to attach disk [datastore1] volume-47f39439-20e2-4392-8f87-83a201ccb62d/volume-47f39439-20e2-4392-8f87-83a201ccb62d.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 915.278616] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d18db13-6801-4aac-861d-f2994812020d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.297375] env[62923]: DEBUG oslo_vmware.api [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 915.297375] env[62923]: value = "task-1370143" [ 915.297375] env[62923]: _type = "Task" [ 915.297375] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.309724] env[62923]: DEBUG oslo_vmware.api [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370143, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.321960] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c97092-8a0b-27ae-16f0-6077bc5b9bec, 'name': SearchDatastore_Task, 'duration_secs': 0.02515} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.322783] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07db26ef-9b77-408b-9ab3-812635052c94 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.328033] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 915.328033] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ce7292-8c60-bf31-388a-00a47e94c0c5" [ 915.328033] env[62923]: _type = "Task" [ 915.328033] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.336201] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ce7292-8c60-bf31-388a-00a47e94c0c5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.385396] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370142, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.455263] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370139, 'name': ReconfigVM_Task, 'duration_secs': 1.12985} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.455708] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Reconfigured VM instance instance-00000052 to attach disk [datastore1] f52f5912-d6e8-4da5-ac39-65bb065b6555/f52f5912-d6e8-4da5-ac39-65bb065b6555.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 915.456357] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c5cf13be-3f7f-42dc-b2c9-73083a423ddf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.462590] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 915.462590] env[62923]: value = "task-1370144" [ 915.462590] env[62923]: _type = "Task" [ 915.462590] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.471566] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370144, 'name': Rename_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.496523] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "41cc788d-9be8-4959-9cef-d91304f5879d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.497193] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "41cc788d-9be8-4959-9cef-d91304f5879d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.628475] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 906470fc-5fec-4c98-8a38-337361e12bc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 915.628475] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Acquiring lock "refresh_cache-65000ac5-1c28-4abe-bc96-c440f0b14d3d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.628475] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Acquired lock "refresh_cache-65000ac5-1c28-4abe-bc96-c440f0b14d3d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.628475] env[62923]: DEBUG nova.network.neutron [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 915.807411] env[62923]: DEBUG oslo_vmware.api [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370143, 'name': ReconfigVM_Task, 'duration_secs': 0.161538} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.807692] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291490', 'volume_id': '47f39439-20e2-4392-8f87-83a201ccb62d', 'name': 'volume-47f39439-20e2-4392-8f87-83a201ccb62d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '43065826-0f2b-48dc-bc42-8e0fd84fdcd3', 'attached_at': '', 'detached_at': '', 'volume_id': '47f39439-20e2-4392-8f87-83a201ccb62d', 'serial': '47f39439-20e2-4392-8f87-83a201ccb62d'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 915.839170] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ce7292-8c60-bf31-388a-00a47e94c0c5, 'name': SearchDatastore_Task, 'duration_secs': 0.009478} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.839301] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.839572] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 3f60e93d-15ae-4fe4-ba86-6b6b123b645c/3f60e93d-15ae-4fe4-ba86-6b6b123b645c.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 915.839873] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9be956a-8619-4bb9-b9db-3172dd8f2585 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.846294] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 915.846294] env[62923]: value = "task-1370145" [ 915.846294] env[62923]: _type = "Task" [ 915.846294] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.854753] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370145, 'name': CopyVirtualDisk_Task} progress is 0%. 
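The CopyVirtualDisk_Task above is the cache-hit path of image fetching: the preceding SearchDatastore_Task calls confirmed cd84cf13-77b9-4bc1-bb15-31bece605a8e already sits in devstack-image-cache_base, so spawning 3f60e93d costs a datastore-local disk copy rather than a Glance download. A sketch of the copy call, with dc_ref again an assumed datacenter moref:

    def copy_cached_image(session, dc_ref, cache_vmdk, instance_vmdk):
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=cache_vmdk, sourceDatacenter=dc_ref,
            destName=instance_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(task)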
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.883468] env[62923]: DEBUG oslo_vmware.api [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370142, 'name': RemoveSnapshot_Task, 'duration_secs': 0.763824} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.883468] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Deleted Snapshot of the VM instance {{(pid=62923) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 915.883678] env[62923]: INFO nova.compute.manager [None req-96715310-410b-4eb1-9e5c-b817d6407bb5 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Took 16.25 seconds to snapshot the instance on the hypervisor. [ 915.941171] env[62923]: DEBUG nova.compute.manager [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 915.942649] env[62923]: DEBUG nova.virt.hardware [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 915.942929] env[62923]: DEBUG nova.virt.hardware [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 915.943151] env[62923]: DEBUG nova.virt.hardware [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.943502] env[62923]: DEBUG nova.virt.hardware [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 915.944163] env[62923]: DEBUG nova.virt.hardware [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Image pref 0:0:0 
{{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.944756] env[62923]: DEBUG nova.virt.hardware [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 915.945118] env[62923]: DEBUG nova.virt.hardware [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 915.945320] env[62923]: DEBUG nova.virt.hardware [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 915.945587] env[62923]: DEBUG nova.virt.hardware [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 915.945761] env[62923]: DEBUG nova.virt.hardware [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 915.946320] env[62923]: DEBUG nova.virt.hardware [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 915.947797] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed1b536-d5eb-4f8b-a9f3-a23e7708ff09 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.958511] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd7d98e-5acf-472b-acc1-d28fa8027ad8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.981754] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370144, 'name': Rename_Task, 'duration_secs': 0.160552} completed successfully. 
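The topology walk above reduces to enumerating factorisations of the vcpu count that fit the per-dimension limits; with one vcpu and limits of 65536 everywhere, only 1:1:1 survives, matching "Got 1 possible topologies". A simplified sketch of that enumeration (not Nova's exact implementation):

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # Yield (sockets, cores, threads) triples whose product is vcpus
        # and which respect the limits; bounding each range by vcpus
        # keeps the search small.
        for s in range(1, min(max_sockets, vcpus) + 1):
            for c in range(1, min(max_cores, vcpus) + 1):
                for t in range(1, min(max_threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1, 65536, 65536, 65536)))  # [(1, 1, 1)]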
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.982107] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 915.982411] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46d11b80-914c-4623-b60c-9796c3ba5a3b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.993270] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 915.993270] env[62923]: value = "task-1370146" [ 915.993270] env[62923]: _type = "Task" [ 915.993270] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.002271] env[62923]: DEBUG nova.compute.manager [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 916.006249] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370146, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.131827] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 41cc788d-9be8-4959-9cef-d91304f5879d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 916.135058] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 916.135058] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 916.187926] env[62923]: DEBUG nova.network.neutron [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.360826] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370145, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.376305] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1d7eb0-7efc-4fdd-ab2f-533f6ad3a513 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.382370] env[62923]: DEBUG nova.network.neutron [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Updating instance_info_cache with network_info: [{"id": "9d495caf-4038-4207-8c80-1309086eddfc", "address": "fa:16:3e:aa:f5:e5", "network": {"id": "221d8ea5-bee0-425f-9767-c86c9d0b69c1", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-913155556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cbec40cae345480d80d07ad9abaa1297", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d495caf-40", "ovs_interfaceid": "9d495caf-4038-4207-8c80-1309086eddfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.396211] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c826f79-b5db-491a-af8b-d7a2c6c3cf18 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.440246] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d11fce-b4cf-4834-842b-d295e76a6986 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.451732] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0afee4-3a4a-490c-9edd-677596ee0fea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.471460] env[62923]: DEBUG nova.compute.provider_tree [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.514164] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 
tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370146, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.528892] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.864044] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370145, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.681534} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.866803] env[62923]: DEBUG nova.objects.instance [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lazy-loading 'flavor' on Instance uuid 43065826-0f2b-48dc-bc42-8e0fd84fdcd3 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.866803] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 3f60e93d-15ae-4fe4-ba86-6b6b123b645c/3f60e93d-15ae-4fe4-ba86-6b6b123b645c.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 916.867070] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 916.870540] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4a61bb4-1811-4b2f-88f2-5692f120cd40 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.875140] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 916.875140] env[62923]: value = "task-1370147" [ 916.875140] env[62923]: _type = "Task" [ 916.875140] env[62923]: } to complete. 
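The network_info cache entry above is what gets flattened into the "Instance VIF info" list just below: the port id becomes iface_id, address becomes mac_address, and for an NSX-backed port the opaque network id is read out of details['nsx-logical-switch-id']. A sketch of that flattening for this vif shape (vif_model is taken as the vmxnet3 default here):

    def vif_info_from_network_info(vif):
        details = vif.get('details', {})
        return {
            'iface_id': vif['id'],                     # 9d495caf-...
            'mac_address': vif['address'],             # fa:16:3e:aa:f5:e5
            'vif_model': 'vmxnet3',
            'network_name': vif['network']['bridge'],  # 'br-int'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
        }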
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.889907] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Releasing lock "refresh_cache-65000ac5-1c28-4abe-bc96-c440f0b14d3d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.889907] env[62923]: DEBUG nova.compute.manager [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Instance network_info: |[{"id": "9d495caf-4038-4207-8c80-1309086eddfc", "address": "fa:16:3e:aa:f5:e5", "network": {"id": "221d8ea5-bee0-425f-9767-c86c9d0b69c1", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-913155556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cbec40cae345480d80d07ad9abaa1297", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d495caf-40", "ovs_interfaceid": "9d495caf-4038-4207-8c80-1309086eddfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 916.890137] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370147, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.890475] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:f5:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d495caf-4038-4207-8c80-1309086eddfc', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 916.900765] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Creating folder: Project (cbec40cae345480d80d07ad9abaa1297). Parent ref: group-v291405. 
{{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 916.900765] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e25fe63-ea91-4b15-96a5-742d735cddeb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.916472] env[62923]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 916.916761] env[62923]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62923) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 916.917492] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Folder already exists: Project (cbec40cae345480d80d07ad9abaa1297). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 916.917741] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Creating folder: Instances. Parent ref: group-v291474. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 916.918021] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64993d41-e8e8-4173-a241-b7ee31c8a334 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.928782] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Created folder: Instances in parent group-v291474. [ 916.928965] env[62923]: DEBUG oslo.service.loopingcall [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 916.929214] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.929480] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47ec8218-27c5-4444-97f2-1ffbd996e90d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.957382] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.957382] env[62923]: value = "task-1370150" [ 916.957382] env[62923]: _type = "Task" [ 916.957382] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.964874] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370150, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.974882] env[62923]: DEBUG nova.scheduler.client.report [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 917.005117] env[62923]: DEBUG oslo_vmware.api [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370146, 'name': PowerOnVM_Task, 'duration_secs': 0.824218} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.005369] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 917.005924] env[62923]: INFO nova.compute.manager [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Took 12.38 seconds to spawn the instance on the hypervisor. [ 917.005924] env[62923]: DEBUG nova.compute.manager [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 917.006696] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4aa055-c592-4194-824e-3c6c22bc426d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.288646] env[62923]: DEBUG nova.compute.manager [req-7b44468c-fa8a-4e85-bd6d-52a091b613cf req-7d5fc257-8782-4c27-8997-e8c926dd94a5 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Received event network-changed-9d495caf-4038-4207-8c80-1309086eddfc {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 917.288646] env[62923]: DEBUG nova.compute.manager [req-7b44468c-fa8a-4e85-bd6d-52a091b613cf req-7d5fc257-8782-4c27-8997-e8c926dd94a5 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Refreshing instance network info cache due to event network-changed-9d495caf-4038-4207-8c80-1309086eddfc. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 917.288646] env[62923]: DEBUG oslo_concurrency.lockutils [req-7b44468c-fa8a-4e85-bd6d-52a091b613cf req-7d5fc257-8782-4c27-8997-e8c926dd94a5 service nova] Acquiring lock "refresh_cache-65000ac5-1c28-4abe-bc96-c440f0b14d3d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.288646] env[62923]: DEBUG oslo_concurrency.lockutils [req-7b44468c-fa8a-4e85-bd6d-52a091b613cf req-7d5fc257-8782-4c27-8997-e8c926dd94a5 service nova] Acquired lock "refresh_cache-65000ac5-1c28-4abe-bc96-c440f0b14d3d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.288646] env[62923]: DEBUG nova.network.neutron [req-7b44468c-fa8a-4e85-bd6d-52a091b613cf req-7d5fc257-8782-4c27-8997-e8c926dd94a5 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Refreshing network info cache for port 9d495caf-4038-4207-8c80-1309086eddfc {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 917.371770] env[62923]: DEBUG oslo_concurrency.lockutils [None req-61308a30-9102-48ad-a63d-8cb30284ff3a tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.811s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.385180] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370147, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073094} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.385823] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 917.387408] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcffdfb-a83c-48f8-b012-faa1e1975e9d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.413651] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 3f60e93d-15ae-4fe4-ba86-6b6b123b645c/3f60e93d-15ae-4fe4-ba86-6b6b123b645c.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 917.414333] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca0c5854-5978-4dcc-a76d-aff04cf13a3c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.434622] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 917.434622] env[62923]: value = "task-1370151" [ 917.434622] env[62923]: _type = "Task" [ 917.434622] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.443504] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370151, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.467319] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370150, 'name': CreateVM_Task, 'duration_secs': 0.452319} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.467487] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 917.468237] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291480', 'volume_id': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'name': 'volume-25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '65000ac5-1c28-4abe-bc96-c440f0b14d3d', 'attached_at': '', 'detached_at': '', 'volume_id': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'serial': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca'}, 'mount_device': '/dev/sda', 'attachment_id': 'bebc0bbd-bbed-4db3-b716-ffddbe793203', 'guest_format': None, 'disk_bus': None, 'delete_on_termination': True, 'boot_index': 0, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=62923) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 917.468449] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Root volume attach. 
Driver type: vmdk {{(pid=62923) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 917.469257] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c50396b-11d9-45c9-abeb-7f3073d44c6d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.477766] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21fb91da-589f-4a43-b804-b17db0d1faaf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.483243] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "066da19f-daf0-44e3-8ae0-89f0c970cb92" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.483546] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "066da19f-daf0-44e3-8ae0-89f0c970cb92" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.483748] env[62923]: DEBUG nova.compute.manager [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Going to confirm migration 1 {{(pid=62923) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 917.489155] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 917.489155] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.408s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.489155] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.306s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.489155] env[62923]: INFO nova.compute.claims [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 917.490673] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eff450c-2126-40be-861a-0ad7b6c6ae6d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
917.498391] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-5baf5459-158d-410d-9604-8a0197b603d7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.510177] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Waiting for the task: (returnval){ [ 917.510177] env[62923]: value = "task-1370152" [ 917.510177] env[62923]: _type = "Task" [ 917.510177] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.529612] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370152, 'name': RelocateVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.532323] env[62923]: INFO nova.compute.manager [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Took 32.47 seconds to build instance. [ 917.953395] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370151, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.995192] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.995546] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.020811] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370152, 'name': RelocateVM_Task, 'duration_secs': 0.411104} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.021406] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Volume attach. 
Driver type: vmdk {{(pid=62923) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 918.021719] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291480', 'volume_id': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'name': 'volume-25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '65000ac5-1c28-4abe-bc96-c440f0b14d3d', 'attached_at': '', 'detached_at': '', 'volume_id': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'serial': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 918.022601] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5709ff08-cb32-4e4c-ac41-d481e944c5b9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.040425] env[62923]: DEBUG oslo_concurrency.lockutils [None req-372fb603-42b5-4be3-acaf-ad889d98e50e tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "f52f5912-d6e8-4da5-ac39-65bb065b6555" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.985s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.041624] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1080a9f-7cbd-415f-9314-613e3bbf7396 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.067407] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] volume-25dfd9ac-5161-4012-80a4-4fb573e0e4ca/volume-25dfd9ac-5161-4012-80a4-4fb573e0e4ca.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.067407] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9712276e-58d4-482c-9571-f69dab9757fa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.088111] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Waiting for the task: (returnval){ [ 918.088111] env[62923]: value = "task-1370153" [ 918.088111] env[62923]: _type = "Task" [ 918.088111] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.096606] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370153, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.273488] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.273696] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.273872] env[62923]: DEBUG nova.network.neutron [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 918.274073] env[62923]: DEBUG nova.objects.instance [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lazy-loading 'info_cache' on Instance uuid 066da19f-daf0-44e3-8ae0-89f0c970cb92 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.400056] env[62923]: DEBUG nova.network.neutron [req-7b44468c-fa8a-4e85-bd6d-52a091b613cf req-7d5fc257-8782-4c27-8997-e8c926dd94a5 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Updated VIF entry in instance network info cache for port 9d495caf-4038-4207-8c80-1309086eddfc. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 918.400056] env[62923]: DEBUG nova.network.neutron [req-7b44468c-fa8a-4e85-bd6d-52a091b613cf req-7d5fc257-8782-4c27-8997-e8c926dd94a5 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Updating instance_info_cache with network_info: [{"id": "9d495caf-4038-4207-8c80-1309086eddfc", "address": "fa:16:3e:aa:f5:e5", "network": {"id": "221d8ea5-bee0-425f-9767-c86c9d0b69c1", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-913155556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cbec40cae345480d80d07ad9abaa1297", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d495caf-40", "ovs_interfaceid": "9d495caf-4038-4207-8c80-1309086eddfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.451054] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370151, 'name': ReconfigVM_Task, 'duration_secs': 0.534433} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.451249] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 3f60e93d-15ae-4fe4-ba86-6b6b123b645c/3f60e93d-15ae-4fe4-ba86-6b6b123b645c.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 918.451894] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b08aa25a-66fd-43ee-841b-36651633b301 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.458389] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 918.458389] env[62923]: value = "task-1370154" [ 918.458389] env[62923]: _type = "Task" [ 918.458389] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.467707] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370154, 'name': Rename_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.499917] env[62923]: DEBUG nova.compute.manager [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 918.526664] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "b145b71c-c56b-4872-bb61-fa3e65fef04f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.526664] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "b145b71c-c56b-4872-bb61-fa3e65fef04f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.600471] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370153, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.741367] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f48a5c2-4298-4bf8-9809-4a4adcfc36e4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.749204] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747baf09-568d-47a7-bc53-bcf4d3493ec1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.781309] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c46456-bafa-48b3-a7d7-41dc9a75c56c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.788881] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e1570d-b04f-4af0-9d7b-a41c6319d999 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.804268] env[62923]: DEBUG nova.compute.provider_tree [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.904402] env[62923]: DEBUG oslo_concurrency.lockutils [req-7b44468c-fa8a-4e85-bd6d-52a091b613cf req-7d5fc257-8782-4c27-8997-e8c926dd94a5 service nova] Releasing lock "refresh_cache-65000ac5-1c28-4abe-bc96-c440f0b14d3d" {{(pid=62923) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.951272] env[62923]: DEBUG nova.compute.manager [req-2564a6ee-53ac-44e8-b846-69f670f18f95 req-a8eb6d97-f6a8-42ee-bb2b-6266e9e532c2 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Received event network-changed-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 918.951272] env[62923]: DEBUG nova.compute.manager [req-2564a6ee-53ac-44e8-b846-69f670f18f95 req-a8eb6d97-f6a8-42ee-bb2b-6266e9e532c2 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Refreshing instance network info cache due to event network-changed-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 918.951272] env[62923]: DEBUG oslo_concurrency.lockutils [req-2564a6ee-53ac-44e8-b846-69f670f18f95 req-a8eb6d97-f6a8-42ee-bb2b-6266e9e532c2 service nova] Acquiring lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.951272] env[62923]: DEBUG oslo_concurrency.lockutils [req-2564a6ee-53ac-44e8-b846-69f670f18f95 req-a8eb6d97-f6a8-42ee-bb2b-6266e9e532c2 service nova] Acquired lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.951272] env[62923]: DEBUG nova.network.neutron [req-2564a6ee-53ac-44e8-b846-69f670f18f95 req-a8eb6d97-f6a8-42ee-bb2b-6266e9e532c2 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Refreshing network info cache for port cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 918.970191] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370154, 'name': Rename_Task, 'duration_secs': 0.274956} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.970191] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 918.970609] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d946d65-96ae-4bff-9422-63322d04e709 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.980017] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 918.980017] env[62923]: value = "task-1370155" [ 918.980017] env[62923]: _type = "Task" [ 918.980017] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.985058] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370155, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.024146] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.031018] env[62923]: DEBUG nova.compute.manager [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 919.098294] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370153, 'name': ReconfigVM_Task, 'duration_secs': 0.644689} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.098735] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Reconfigured VM instance instance-00000054 to attach disk [datastore2] volume-25dfd9ac-5161-4012-80a4-4fb573e0e4ca/volume-25dfd9ac-5161-4012-80a4-4fb573e0e4ca.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.103512] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8057f22c-f29f-46ad-83b2-68bbd7e3507a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.120986] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Waiting for the task: (returnval){ [ 919.120986] env[62923]: value = "task-1370156" [ 919.120986] env[62923]: _type = "Task" [ 919.120986] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.129730] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370156, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.308518] env[62923]: DEBUG nova.scheduler.client.report [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 919.488381] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370155, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.556697] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.631937] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370156, 'name': ReconfigVM_Task, 'duration_secs': 0.133363} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.632313] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291480', 'volume_id': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'name': 'volume-25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '65000ac5-1c28-4abe-bc96-c440f0b14d3d', 'attached_at': '', 'detached_at': '', 'volume_id': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'serial': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 919.636779] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4a45802-d9ef-4b75-8ce2-b09c63520401 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.645556] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Waiting for the task: (returnval){ [ 919.645556] env[62923]: value = "task-1370157" [ 919.645556] env[62923]: _type = "Task" [ 919.645556] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.656754] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370157, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.755830] env[62923]: DEBUG nova.network.neutron [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating instance_info_cache with network_info: [{"id": "0eb9a757-0625-4e00-a9b0-55888eb57e7b", "address": "fa:16:3e:57:cc:02", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eb9a757-06", "ovs_interfaceid": "0eb9a757-0625-4e00-a9b0-55888eb57e7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.812589] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.326s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.813120] env[62923]: DEBUG nova.compute.manager [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 919.819018] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.287s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.819018] env[62923]: INFO nova.compute.claims [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 919.850221] env[62923]: DEBUG nova.network.neutron [req-2564a6ee-53ac-44e8-b846-69f670f18f95 req-a8eb6d97-f6a8-42ee-bb2b-6266e9e532c2 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updated VIF entry in instance network info cache for port cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 919.850651] env[62923]: DEBUG nova.network.neutron [req-2564a6ee-53ac-44e8-b846-69f670f18f95 req-a8eb6d97-f6a8-42ee-bb2b-6266e9e532c2 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updating instance_info_cache with network_info: [{"id": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "address": "fa:16:3e:59:5e:e2", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcea0b4ee-b6", "ovs_interfaceid": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.987401] env[62923]: DEBUG oslo_vmware.api [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370155, 'name': PowerOnVM_Task, 'duration_secs': 0.581975} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.987982] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 919.988328] env[62923]: INFO nova.compute.manager [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Took 9.86 seconds to spawn the instance on the hypervisor. [ 919.988672] env[62923]: DEBUG nova.compute.manager [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 919.989559] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e855b8a9-b6e8-4cea-81c2-ea970b3b7a29 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.157343] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370157, 'name': Rename_Task, 'duration_secs': 0.264936} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.157343] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 920.157343] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-992ee301-7d3d-4c6f-8c05-b80662f0c27e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.163112] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Waiting for the task: (returnval){ [ 920.163112] env[62923]: value = "task-1370158" [ 920.163112] env[62923]: _type = "Task" [ 920.163112] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.169386] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370158, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 920.261717] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "refresh_cache-066da19f-daf0-44e3-8ae0-89f0c970cb92" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 920.261857] env[62923]: DEBUG nova.objects.instance [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lazy-loading 'migration_context' on Instance uuid 066da19f-daf0-44e3-8ae0-89f0c970cb92 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 920.321666] env[62923]: DEBUG nova.compute.utils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 920.325023] env[62923]: DEBUG nova.compute.manager [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 920.326261] env[62923]: DEBUG nova.network.neutron [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 920.354427] env[62923]: DEBUG oslo_concurrency.lockutils [req-2564a6ee-53ac-44e8-b846-69f670f18f95 req-a8eb6d97-f6a8-42ee-bb2b-6266e9e532c2 service nova] Releasing lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 920.382871] env[62923]: DEBUG nova.policy [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e1b761abfd44661a6da62ba35ec442f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2948b6c7e6f04cf98b36777c2fc94fc1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}}
[ 920.508071] env[62923]: INFO nova.compute.manager [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Took 25.89 seconds to build instance.
[ 920.677995] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370158, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 920.721583] env[62923]: DEBUG nova.network.neutron [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Successfully created port: 5d5b7910-f073-428b-abd2-b725d57387b6 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 920.765217] env[62923]: DEBUG nova.objects.base [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Object Instance<066da19f-daf0-44e3-8ae0-89f0c970cb92> lazy-loaded attributes: info_cache,migration_context {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}}
[ 920.766710] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bcac4c-955f-4cdc-a4fe-9351d10e83c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 920.786095] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba2afec3-85a3-4d6e-bd91-7179d02ced71 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 920.791762] env[62923]: DEBUG oslo_vmware.api [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){
[ 920.791762] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52161a51-1881-5b42-c7a8-eb814d2b693a"
[ 920.791762] env[62923]: _type = "Task"
[ 920.791762] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 920.800015] env[62923]: DEBUG oslo_vmware.api [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52161a51-1881-5b42-c7a8-eb814d2b693a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 920.825878] env[62923]: DEBUG nova.compute.manager [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 920.983616] env[62923]: DEBUG nova.compute.manager [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Received event network-changed-3adfc18d-e45f-4eb0-8019-d5531853f63f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 920.983816] env[62923]: DEBUG nova.compute.manager [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Refreshing instance network info cache due to event network-changed-3adfc18d-e45f-4eb0-8019-d5531853f63f. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 920.984037] env[62923]: DEBUG oslo_concurrency.lockutils [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] Acquiring lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 920.985024] env[62923]: DEBUG oslo_concurrency.lockutils [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] Acquired lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 920.985024] env[62923]: DEBUG nova.network.neutron [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Refreshing network info cache for port 3adfc18d-e45f-4eb0-8019-d5531853f63f {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 921.010467] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3e7ca61d-830a-4942-87d5-05691e666c02 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.405s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 921.092496] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e061096a-eac9-47df-83cd-86d8a50efb3e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.102780] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d7cb3b-a241-45c5-8ddb-6109b952b491 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.138117] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf570322-e88a-402c-abf9-92baf3f92a64 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.145584] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e28962-386c-4092-bd73-4aa7267ebd6b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.159085] env[62923]: DEBUG nova.compute.provider_tree [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 921.172445] env[62923]: DEBUG oslo_vmware.api [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370158, 'name': PowerOnVM_Task, 'duration_secs': 0.737995} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 921.173211] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 921.173430] env[62923]: INFO nova.compute.manager [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Took 5.23 seconds to spawn the instance on the hypervisor.
[ 921.173612] env[62923]: DEBUG nova.compute.manager [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 921.174370] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb3976c-22d6-4d77-a530-005d05ac262d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.218168] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9619ea16-9a38-4662-b574-6ab30a905bc3 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 921.218487] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9619ea16-9a38-4662-b574-6ab30a905bc3 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 921.218672] env[62923]: DEBUG nova.compute.manager [None req-9619ea16-9a38-4662-b574-6ab30a905bc3 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 921.219588] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dfaab9c-25ce-4f69-a944-93304d45fab5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.226424] env[62923]: DEBUG nova.compute.manager [None req-9619ea16-9a38-4662-b574-6ab30a905bc3 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62923) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}}
[ 921.226999] env[62923]: DEBUG nova.objects.instance [None req-9619ea16-9a38-4662-b574-6ab30a905bc3 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lazy-loading 'flavor' on Instance uuid 3f60e93d-15ae-4fe4-ba86-6b6b123b645c {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 921.303183] env[62923]: DEBUG oslo_vmware.api [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52161a51-1881-5b42-c7a8-eb814d2b693a, 'name': SearchDatastore_Task, 'duration_secs': 0.009849} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 921.303436] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 921.586915] env[62923]: DEBUG oslo_concurrency.lockutils [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 921.587202] env[62923]: DEBUG oslo_concurrency.lockutils [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 921.587440] env[62923]: INFO nova.compute.manager [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Rebooting instance
[ 921.662449] env[62923]: DEBUG nova.scheduler.client.report [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 921.696235] env[62923]: INFO nova.compute.manager [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Took 20.34 seconds to build instance.
[ 921.732195] env[62923]: DEBUG nova.network.neutron [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updated VIF entry in instance network info cache for port 3adfc18d-e45f-4eb0-8019-d5531853f63f. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 921.732565] env[62923]: DEBUG nova.network.neutron [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updating instance_info_cache with network_info: [{"id": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "address": "fa:16:3e:bf:9d:f5", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3adfc18d-e4", "ovs_interfaceid": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 921.737036] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9619ea16-9a38-4662-b574-6ab30a905bc3 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 921.737036] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d89717f3-1b6e-47cf-a0ab-642ea65e6ff1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.741824] env[62923]: DEBUG oslo_vmware.api [None req-9619ea16-9a38-4662-b574-6ab30a905bc3 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){
[ 921.741824] env[62923]: value = "task-1370159"
[ 921.741824] env[62923]: _type = "Task"
[ 921.741824] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 921.752525] env[62923]: DEBUG oslo_vmware.api [None req-9619ea16-9a38-4662-b574-6ab30a905bc3 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370159, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 921.838046] env[62923]: DEBUG nova.compute.manager [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 921.864632] env[62923]: DEBUG nova.virt.hardware [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 921.864909] env[62923]: DEBUG nova.virt.hardware [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 921.865108] env[62923]: DEBUG nova.virt.hardware [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 921.865269] env[62923]: DEBUG nova.virt.hardware [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 921.865442] env[62923]: DEBUG nova.virt.hardware [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 921.865598] env[62923]: DEBUG nova.virt.hardware [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 921.865811] env[62923]: DEBUG nova.virt.hardware [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 921.866036] env[62923]: DEBUG nova.virt.hardware [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 921.866221] env[62923]: DEBUG nova.virt.hardware [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 921.866385] env[62923]: DEBUG nova.virt.hardware [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 921.866570] env[62923]: DEBUG nova.virt.hardware [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 921.867568] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d544357a-55c2-43e2-a796-7c3a5c2baf30 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 921.876297] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3f4142-ce3f-43a8-9620-0e9649d77d80 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 922.116642] env[62923]: DEBUG oslo_concurrency.lockutils [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 922.116861] env[62923]: DEBUG oslo_concurrency.lockutils [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 922.117054] env[62923]: DEBUG nova.network.neutron [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 922.167896] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.352s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 922.168464] env[62923]: DEBUG nova.compute.manager [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 922.171330] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.147s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 922.172796] env[62923]: INFO nova.compute.claims [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 922.199242] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8d717848-4bbf-4a50-b959-00d5a7bad3fb tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Lock "65000ac5-1c28-4abe-bc96-c440f0b14d3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.851s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 922.237240] env[62923]: DEBUG oslo_concurrency.lockutils [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] Releasing lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 922.237536] env[62923]: DEBUG nova.compute.manager [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Received event network-changed-3adfc18d-e45f-4eb0-8019-d5531853f63f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 922.237725] env[62923]: DEBUG nova.compute.manager [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Refreshing instance network info cache due to event network-changed-3adfc18d-e45f-4eb0-8019-d5531853f63f. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 922.237952] env[62923]: DEBUG oslo_concurrency.lockutils [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] Acquiring lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 922.238112] env[62923]: DEBUG oslo_concurrency.lockutils [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] Acquired lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 922.238371] env[62923]: DEBUG nova.network.neutron [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Refreshing network info cache for port 3adfc18d-e45f-4eb0-8019-d5531853f63f {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 922.254591] env[62923]: DEBUG oslo_vmware.api [None req-9619ea16-9a38-4662-b574-6ab30a905bc3 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370159, 'name': PowerOffVM_Task, 'duration_secs': 0.255136} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 922.256659] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9619ea16-9a38-4662-b574-6ab30a905bc3 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 922.257372] env[62923]: DEBUG nova.compute.manager [None req-9619ea16-9a38-4662-b574-6ab30a905bc3 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 922.258265] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad8ae7f-4438-4636-b738-d30ddc25f3cb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 922.397960] env[62923]: DEBUG nova.compute.manager [req-c68ca50c-249d-422c-8291-d45f2b09b712 req-218ee742-0246-42d4-a982-ad36ae45ba1f service nova] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Received event network-vif-plugged-5d5b7910-f073-428b-abd2-b725d57387b6 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 922.398179] env[62923]: DEBUG oslo_concurrency.lockutils [req-c68ca50c-249d-422c-8291-d45f2b09b712 req-218ee742-0246-42d4-a982-ad36ae45ba1f service nova] Acquiring lock "906470fc-5fec-4c98-8a38-337361e12bc5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 922.398401] env[62923]: DEBUG oslo_concurrency.lockutils [req-c68ca50c-249d-422c-8291-d45f2b09b712 req-218ee742-0246-42d4-a982-ad36ae45ba1f service nova] Lock "906470fc-5fec-4c98-8a38-337361e12bc5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 922.398580] env[62923]: DEBUG oslo_concurrency.lockutils [req-c68ca50c-249d-422c-8291-d45f2b09b712 req-218ee742-0246-42d4-a982-ad36ae45ba1f service nova] Lock "906470fc-5fec-4c98-8a38-337361e12bc5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 922.399102] env[62923]: DEBUG nova.compute.manager [req-c68ca50c-249d-422c-8291-d45f2b09b712 req-218ee742-0246-42d4-a982-ad36ae45ba1f service nova] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] No waiting events found dispatching network-vif-plugged-5d5b7910-f073-428b-abd2-b725d57387b6 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 922.399989] env[62923]: WARNING nova.compute.manager [req-c68ca50c-249d-422c-8291-d45f2b09b712 req-218ee742-0246-42d4-a982-ad36ae45ba1f service nova] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Received unexpected event network-vif-plugged-5d5b7910-f073-428b-abd2-b725d57387b6 for instance with vm_state building and task_state spawning.
[ 922.505659] env[62923]: DEBUG nova.network.neutron [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Successfully updated port: 5d5b7910-f073-428b-abd2-b725d57387b6 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 922.677670] env[62923]: DEBUG nova.compute.utils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 922.681037] env[62923]: DEBUG nova.compute.manager [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 922.681200] env[62923]: DEBUG nova.network.neutron [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 922.749347] env[62923]: DEBUG nova.policy [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ad76ea94b62472fa3318cbbdb308ebe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d1559d2844647aba922cae8e9d992e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}}
[ 922.775151] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9619ea16-9a38-4662-b574-6ab30a905bc3 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.557s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 923.012176] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "refresh_cache-906470fc-5fec-4c98-8a38-337361e12bc5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 923.012176] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "refresh_cache-906470fc-5fec-4c98-8a38-337361e12bc5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 923.012176] env[62923]: DEBUG nova.network.neutron [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 923.188131] env[62923]: DEBUG nova.compute.manager [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 923.211988] env[62923]: DEBUG nova.compute.manager [req-7846561a-6772-4ab4-bb3c-9919d7a41c21 req-3c24e0a4-bbf9-4ab9-82a3-8de56d161c56 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Received event network-changed-9d495caf-4038-4207-8c80-1309086eddfc {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 923.212113] env[62923]: DEBUG nova.compute.manager [req-7846561a-6772-4ab4-bb3c-9919d7a41c21 req-3c24e0a4-bbf9-4ab9-82a3-8de56d161c56 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Refreshing instance network info cache due to event network-changed-9d495caf-4038-4207-8c80-1309086eddfc. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 923.212648] env[62923]: DEBUG oslo_concurrency.lockutils [req-7846561a-6772-4ab4-bb3c-9919d7a41c21 req-3c24e0a4-bbf9-4ab9-82a3-8de56d161c56 service nova] Acquiring lock "refresh_cache-65000ac5-1c28-4abe-bc96-c440f0b14d3d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 923.212648] env[62923]: DEBUG oslo_concurrency.lockutils [req-7846561a-6772-4ab4-bb3c-9919d7a41c21 req-3c24e0a4-bbf9-4ab9-82a3-8de56d161c56 service nova] Acquired lock "refresh_cache-65000ac5-1c28-4abe-bc96-c440f0b14d3d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 923.212648] env[62923]: DEBUG nova.network.neutron [req-7846561a-6772-4ab4-bb3c-9919d7a41c21 req-3c24e0a4-bbf9-4ab9-82a3-8de56d161c56 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Refreshing network info cache for port 9d495caf-4038-4207-8c80-1309086eddfc {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 923.278758] env[62923]: DEBUG nova.network.neutron [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance_info_cache with network_info: [{"id": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "address": "fa:16:3e:38:95:72", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb7d101-34", "ovs_interfaceid": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 923.407598] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec09c50-5d10-4243-aa7b-5defbd4063d1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 923.421500] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1471755d-bae9-4e65-b206-b870e0478900 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 923.425832] env[62923]: DEBUG nova.network.neutron [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updated VIF entry in instance network info cache for port 3adfc18d-e45f-4eb0-8019-d5531853f63f. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 923.426207] env[62923]: DEBUG nova.network.neutron [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updating instance_info_cache with network_info: [{"id": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "address": "fa:16:3e:bf:9d:f5", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3adfc18d-e4", "ovs_interfaceid": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 923.458747] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e1ce7b-8034-42da-899c-c7bb58c97547 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 923.466920] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157f4568-94dd-42f2-9c83-256c6e1a648e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 923.483653] env[62923]: DEBUG nova.compute.provider_tree [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 923.591819] env[62923]: DEBUG nova.network.neutron [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 923.610664] env[62923]: DEBUG nova.network.neutron [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Successfully created port: 24cd4887-ac99-48ba-bf0a-e5077d525b6c {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 923.783866] env[62923]: DEBUG oslo_concurrency.lockutils [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 923.786275] env[62923]: DEBUG nova.compute.manager [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 923.787182] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401869e5-12c7-4a2e-8684-305fcb8d5df8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 923.930105] env[62923]: DEBUG oslo_concurrency.lockutils [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] Releasing lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 923.930399] env[62923]: DEBUG nova.compute.manager [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Received event network-changed-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 923.930575] env[62923]: DEBUG nova.compute.manager [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Refreshing instance network info cache due to event network-changed-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 923.930790] env[62923]: DEBUG oslo_concurrency.lockutils [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] Acquiring lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 923.930935] env[62923]: DEBUG oslo_concurrency.lockutils [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] Acquired lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 923.931113] env[62923]: DEBUG nova.network.neutron [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Refreshing network info cache for port cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 923.985517] env[62923]: DEBUG nova.scheduler.client.report [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 924.164152] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 924.164450] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 924.164673] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 924.164859] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 924.165039] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 924.167206] env[62923]: INFO nova.compute.manager [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Terminating instance
[ 924.172850] env[62923]: DEBUG nova.compute.manager [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 924.173027] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 924.177016] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3277f2-bf8a-4056-b013-bf52c9e22a68 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.181842] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 924.182231] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd369c92-8a49-4b2c-911a-df1b4b05790b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.205995] env[62923]: DEBUG nova.compute.manager [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 924.251907] env[62923]: DEBUG nova.virt.hardware [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='ee595a8039257c31b458ce496975c480',container_format='bare',created_at=2024-10-29T12:08:08Z,direct_url=<?>,disk_format='vmdk',id=82a06d7c-e957-4bd3-97f2-0322af9583a1,min_disk=1,min_ram=0,name='tempest-test-snap-1758427640',owner='2d1559d2844647aba922cae8e9d992e6',properties=ImageMetaProps,protected=<?>,size=21334016,status='active',tags=<?>,updated_at=2024-10-29T12:08:24Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 924.252414] env[62923]: DEBUG nova.virt.hardware [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 924.252414] env[62923]: DEBUG nova.virt.hardware [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 924.252546] env[62923]: DEBUG nova.virt.hardware [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 924.252676] env[62923]: DEBUG nova.virt.hardware [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 924.252830] env[62923]: DEBUG nova.virt.hardware [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 924.253206] env[62923]: DEBUG nova.virt.hardware [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 924.253206] env[62923]: DEBUG nova.virt.hardware [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 924.253514] env[62923]: DEBUG nova.virt.hardware [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 924.253718] env[62923]: DEBUG nova.virt.hardware [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 924.253845] env[62923]: DEBUG nova.virt.hardware [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 924.254749] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1d6733-f486-455c-8960-1d7ebf2e5eba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.258224] env[62923]: DEBUG nova.network.neutron [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Updating instance_info_cache with network_info: [{"id": "5d5b7910-f073-428b-abd2-b725d57387b6", "address": "fa:16:3e:e8:33:43", "network": {"id": "9ed96510-533e-4ed6-bf9b-e1a401a9df79", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060581969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2948b6c7e6f04cf98b36777c2fc94fc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5b7910-f0", "ovs_interfaceid": "5d5b7910-f073-428b-abd2-b725d57387b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 924.263019] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 924.263019] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 924.263019] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleting the datastore file [datastore1] 3f60e93d-15ae-4fe4-ba86-6b6b123b645c {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 924.263019] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e28c80b-26c8-427a-b1f9-6dca22abe258 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.267234] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f94955-1841-4c21-8bcf-c29bc758c945 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.272421] env[62923]: DEBUG oslo_vmware.api [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){
[ 924.272421] env[62923]: value = "task-1370161"
[ 924.272421] env[62923]: _type = "Task"
[ 924.272421] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 924.287832] env[62923]: DEBUG oslo_vmware.api [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370161, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 924.490799] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.319s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 924.491341] env[62923]: DEBUG nova.compute.manager [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 924.494125] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.938s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 924.495637] env[62923]: INFO nova.compute.claims [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 924.597071] env[62923]: DEBUG nova.compute.manager [req-ad549df9-8c78-4923-8179-ad6649041eee req-0846ae55-706b-44bd-87d5-036cd412083a service nova] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Received event network-changed-5d5b7910-f073-428b-abd2-b725d57387b6 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 924.597286] env[62923]: DEBUG nova.compute.manager [req-ad549df9-8c78-4923-8179-ad6649041eee req-0846ae55-706b-44bd-87d5-036cd412083a service nova] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Refreshing instance network info cache due to event network-changed-5d5b7910-f073-428b-abd2-b725d57387b6. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 924.597478] env[62923]: DEBUG oslo_concurrency.lockutils [req-ad549df9-8c78-4923-8179-ad6649041eee req-0846ae55-706b-44bd-87d5-036cd412083a service nova] Acquiring lock "refresh_cache-906470fc-5fec-4c98-8a38-337361e12bc5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 924.670742] env[62923]: DEBUG nova.network.neutron [req-7846561a-6772-4ab4-bb3c-9919d7a41c21 req-3c24e0a4-bbf9-4ab9-82a3-8de56d161c56 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Updated VIF entry in instance network info cache for port 9d495caf-4038-4207-8c80-1309086eddfc. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 924.671764] env[62923]: DEBUG nova.network.neutron [req-7846561a-6772-4ab4-bb3c-9919d7a41c21 req-3c24e0a4-bbf9-4ab9-82a3-8de56d161c56 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Updating instance_info_cache with network_info: [{"id": "9d495caf-4038-4207-8c80-1309086eddfc", "address": "fa:16:3e:aa:f5:e5", "network": {"id": "221d8ea5-bee0-425f-9767-c86c9d0b69c1", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-913155556-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cbec40cae345480d80d07ad9abaa1297", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d495caf-40", "ovs_interfaceid": "9d495caf-4038-4207-8c80-1309086eddfc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 924.761883] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "refresh_cache-906470fc-5fec-4c98-8a38-337361e12bc5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 924.762243] env[62923]: DEBUG nova.compute.manager [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Instance network_info: |[{"id": "5d5b7910-f073-428b-abd2-b725d57387b6", "address": "fa:16:3e:e8:33:43", "network": {"id": "9ed96510-533e-4ed6-bf9b-e1a401a9df79", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060581969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2948b6c7e6f04cf98b36777c2fc94fc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5b7910-f0", "ovs_interfaceid": "5d5b7910-f073-428b-abd2-b725d57387b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 924.762558] env[62923]: DEBUG oslo_concurrency.lockutils [req-ad549df9-8c78-4923-8179-ad6649041eee req-0846ae55-706b-44bd-87d5-036cd412083a service nova] Acquired lock "refresh_cache-906470fc-5fec-4c98-8a38-337361e12bc5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 924.762740] env[62923]: DEBUG nova.network.neutron [req-ad549df9-8c78-4923-8179-ad6649041eee req-0846ae55-706b-44bd-87d5-036cd412083a service nova] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Refreshing network info cache for port 5d5b7910-f073-428b-abd2-b725d57387b6 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 924.763991] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:33:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ba07329-1d3e-4ba8-8774-d029262318c4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d5b7910-f073-428b-abd2-b725d57387b6', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 924.771454] env[62923]: DEBUG oslo.service.loopingcall [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 924.774438] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 924.774887] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b783a02f-7a33-4eee-a314-cd69810cc02d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 924.799383] env[62923]: DEBUG oslo_vmware.api [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370161, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.397653} completed successfully.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.800652] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 924.800856] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 924.801047] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 924.801228] env[62923]: INFO nova.compute.manager [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Took 0.63 seconds to destroy the instance on the hypervisor. [ 924.801466] env[62923]: DEBUG oslo.service.loopingcall [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 924.801648] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 924.801648] env[62923]: value = "task-1370162" [ 924.801648] env[62923]: _type = "Task" [ 924.801648] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.801860] env[62923]: DEBUG nova.compute.manager [-] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 924.801956] env[62923]: DEBUG nova.network.neutron [-] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 924.809274] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9ba0e8-56d1-44d7-91fb-80b74168f190 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.817731] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370162, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.819683] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Doing hard reboot of VM {{(pid=62923) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 924.819934] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-ea926660-48e6-42ad-80ec-da2d805b1c8c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.825790] env[62923]: DEBUG oslo_vmware.api [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 924.825790] env[62923]: value = "task-1370163" [ 924.825790] env[62923]: _type = "Task" [ 924.825790] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.834219] env[62923]: DEBUG oslo_vmware.api [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370163, 'name': ResetVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.000075] env[62923]: DEBUG nova.compute.utils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 925.003573] env[62923]: DEBUG nova.compute.manager [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 925.003756] env[62923]: DEBUG nova.network.neutron [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 925.068772] env[62923]: DEBUG nova.policy [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6bd260135a8e4b96b52e6aad41ff4e42', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '418b805157a74173b5cfe13ea5b61c13', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 925.109498] env[62923]: DEBUG nova.network.neutron [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updated VIF entry in instance network info cache for port cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 925.109866] env[62923]: DEBUG nova.network.neutron [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updating instance_info_cache with network_info: [{"id": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "address": "fa:16:3e:59:5e:e2", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcea0b4ee-b6", "ovs_interfaceid": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.174787] env[62923]: DEBUG oslo_concurrency.lockutils [req-7846561a-6772-4ab4-bb3c-9919d7a41c21 req-3c24e0a4-bbf9-4ab9-82a3-8de56d161c56 service nova] Releasing lock "refresh_cache-65000ac5-1c28-4abe-bc96-c440f0b14d3d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.211937] env[62923]: DEBUG 
nova.network.neutron [req-ad549df9-8c78-4923-8179-ad6649041eee req-0846ae55-706b-44bd-87d5-036cd412083a service nova] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Updated VIF entry in instance network info cache for port 5d5b7910-f073-428b-abd2-b725d57387b6. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 925.212327] env[62923]: DEBUG nova.network.neutron [req-ad549df9-8c78-4923-8179-ad6649041eee req-0846ae55-706b-44bd-87d5-036cd412083a service nova] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Updating instance_info_cache with network_info: [{"id": "5d5b7910-f073-428b-abd2-b725d57387b6", "address": "fa:16:3e:e8:33:43", "network": {"id": "9ed96510-533e-4ed6-bf9b-e1a401a9df79", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060581969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2948b6c7e6f04cf98b36777c2fc94fc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d5b7910-f0", "ovs_interfaceid": "5d5b7910-f073-428b-abd2-b725d57387b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.254925] env[62923]: DEBUG nova.compute.manager [req-e00f5dee-e486-48a0-8411-6e1d9d54c82e req-6e54498f-b23b-4ef8-ad09-956a45d68e35 service nova] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Received event network-vif-deleted-84ea0cf0-a773-40ce-946a-65371afd534b {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 925.255148] env[62923]: INFO nova.compute.manager [req-e00f5dee-e486-48a0-8411-6e1d9d54c82e req-6e54498f-b23b-4ef8-ad09-956a45d68e35 service nova] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Neutron deleted interface 84ea0cf0-a773-40ce-946a-65371afd534b; detaching it from the instance and deleting it from the info cache [ 925.255517] env[62923]: DEBUG nova.network.neutron [req-e00f5dee-e486-48a0-8411-6e1d9d54c82e req-6e54498f-b23b-4ef8-ad09-956a45d68e35 service nova] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.313337] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370162, 'name': CreateVM_Task, 'duration_secs': 0.338247} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.313731] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 925.314566] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.314922] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.315404] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 925.315823] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-794daec6-0d23-4051-80fb-787d91f371e3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.321050] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 925.321050] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528ea4b9-d0bc-66a4-03b5-c6ae7fa03467" [ 925.321050] env[62923]: _type = "Task" [ 925.321050] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.333962] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528ea4b9-d0bc-66a4-03b5-c6ae7fa03467, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.337225] env[62923]: DEBUG oslo_vmware.api [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370163, 'name': ResetVM_Task, 'duration_secs': 0.101772} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.337547] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Did hard reboot of VM {{(pid=62923) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 925.337789] env[62923]: DEBUG nova.compute.manager [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 925.338586] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605594a5-20e9-4739-be96-ca9243a3047b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.424489] env[62923]: DEBUG nova.network.neutron [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Successfully created port: 545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 925.504837] env[62923]: DEBUG nova.compute.manager [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 925.612409] env[62923]: DEBUG oslo_concurrency.lockutils [req-e2abd67d-781e-46a4-b0a8-65aab792717c req-191b13cd-e58b-4274-bd7a-7279c68dcf22 service nova] Releasing lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.622543] env[62923]: DEBUG nova.network.neutron [-] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.715296] env[62923]: DEBUG oslo_concurrency.lockutils [req-ad549df9-8c78-4923-8179-ad6649041eee req-0846ae55-706b-44bd-87d5-036cd412083a service nova] Releasing lock "refresh_cache-906470fc-5fec-4c98-8a38-337361e12bc5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.760421] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288c7b20-cebf-42a2-b5d7-552ffa623224 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.764152] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80d0b8d1-935e-49e2-ae23-0ea4fc01f234 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.772589] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142a99db-486c-46ae-9035-9b48c82430c7 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.779438] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60de4d33-e1f1-4ba2-ade8-c924e09db038 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.833658] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8527e7d2-d248-4379-b466-36a50e09e758 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.841535] env[62923]: DEBUG nova.compute.manager [req-e00f5dee-e486-48a0-8411-6e1d9d54c82e req-6e54498f-b23b-4ef8-ad09-956a45d68e35 service nova] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Detach interface failed, port_id=84ea0cf0-a773-40ce-946a-65371afd534b, reason: Instance 3f60e93d-15ae-4fe4-ba86-6b6b123b645c could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 925.842928] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "eaa654f9-023d-4514-930d-6bebd421325a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.842928] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "eaa654f9-023d-4514-930d-6bebd421325a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.862836] env[62923]: DEBUG oslo_concurrency.lockutils [None req-69999641-8e64-48b5-8d29-8f794815c583 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" "released" by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" :: held 4.276s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.867691] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db4b622-e78b-4655-a51a-99b7047da87c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.871826] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528ea4b9-d0bc-66a4-03b5-c6ae7fa03467, 'name': SearchDatastore_Task, 'duration_secs': 0.024034} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.872808] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.873116] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.873511] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.873511] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.873652] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.875852] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73de13f5-c697-401e-922e-5d2a14fff59a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.884977] env[62923]: DEBUG nova.compute.provider_tree [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.895905] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.895905] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 925.895905] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7652d560-f049-4fb9-a475-0b9e49bf53ba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.901512] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 925.901512] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525647d1-1548-47b6-b302-2bce48a2c127" [ 925.901512] env[62923]: _type = "Task" [ 925.901512] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.911376] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525647d1-1548-47b6-b302-2bce48a2c127, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.129507] env[62923]: INFO nova.compute.manager [-] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Took 1.33 seconds to deallocate network for instance. [ 926.135331] env[62923]: DEBUG nova.network.neutron [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Successfully updated port: 24cd4887-ac99-48ba-bf0a-e5077d525b6c {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 926.349639] env[62923]: DEBUG nova.compute.manager [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 926.389636] env[62923]: DEBUG nova.scheduler.client.report [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 926.412532] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525647d1-1548-47b6-b302-2bce48a2c127, 'name': SearchDatastore_Task, 'duration_secs': 0.0114} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.413375] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78b2241a-3b7e-438c-98a9-994134e89518 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.418682] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 926.418682] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5221177f-c2c6-db12-fd83-6df4e29bb247" [ 926.418682] env[62923]: _type = "Task" [ 926.418682] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.426436] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5221177f-c2c6-db12-fd83-6df4e29bb247, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.517927] env[62923]: DEBUG nova.compute.manager [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 926.544376] env[62923]: DEBUG nova.virt.hardware [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 926.544634] env[62923]: DEBUG nova.virt.hardware [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 926.544797] env[62923]: DEBUG nova.virt.hardware [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.544982] env[62923]: DEBUG nova.virt.hardware [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 
tempest-AttachVolumeShelveTestJSON-908149708-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 926.545156] env[62923]: DEBUG nova.virt.hardware [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.545311] env[62923]: DEBUG nova.virt.hardware [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 926.545583] env[62923]: DEBUG nova.virt.hardware [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 926.545760] env[62923]: DEBUG nova.virt.hardware [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 926.545929] env[62923]: DEBUG nova.virt.hardware [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 926.546106] env[62923]: DEBUG nova.virt.hardware [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 926.546280] env[62923]: DEBUG nova.virt.hardware [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 926.547149] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21556308-c653-4cc9-86e0-2d39a6d111ab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.554768] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a3ce80-5f78-4615-a330-36f6ee3e924d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.588134] env[62923]: DEBUG oslo_concurrency.lockutils [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "interface-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-1353213d-e2e0-4537-a849-37be48c686ac" by 
"nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.588413] env[62923]: DEBUG oslo_concurrency.lockutils [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "interface-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-1353213d-e2e0-4537-a849-37be48c686ac" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.588781] env[62923]: DEBUG nova.objects.instance [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lazy-loading 'flavor' on Instance uuid 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.638263] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.638708] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "refresh_cache-41cc788d-9be8-4959-9cef-d91304f5879d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.638837] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "refresh_cache-41cc788d-9be8-4959-9cef-d91304f5879d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.638982] env[62923]: DEBUG nova.network.neutron [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 926.692118] env[62923]: DEBUG nova.compute.manager [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Received event network-vif-plugged-24cd4887-ac99-48ba-bf0a-e5077d525b6c {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.692389] env[62923]: DEBUG oslo_concurrency.lockutils [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] Acquiring lock "41cc788d-9be8-4959-9cef-d91304f5879d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.692725] env[62923]: DEBUG oslo_concurrency.lockutils [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] Lock "41cc788d-9be8-4959-9cef-d91304f5879d-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.692897] env[62923]: DEBUG oslo_concurrency.lockutils [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] Lock "41cc788d-9be8-4959-9cef-d91304f5879d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.693192] env[62923]: DEBUG nova.compute.manager [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] No waiting events found dispatching network-vif-plugged-24cd4887-ac99-48ba-bf0a-e5077d525b6c {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 926.693375] env[62923]: WARNING nova.compute.manager [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Received unexpected event network-vif-plugged-24cd4887-ac99-48ba-bf0a-e5077d525b6c for instance with vm_state building and task_state spawning. [ 926.693577] env[62923]: DEBUG nova.compute.manager [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Received event network-changed-24cd4887-ac99-48ba-bf0a-e5077d525b6c {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.693798] env[62923]: DEBUG nova.compute.manager [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Refreshing instance network info cache due to event network-changed-24cd4887-ac99-48ba-bf0a-e5077d525b6c. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 926.694011] env[62923]: DEBUG oslo_concurrency.lockutils [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] Acquiring lock "refresh_cache-41cc788d-9be8-4959-9cef-d91304f5879d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.876208] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.895287] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.899185] env[62923]: DEBUG nova.compute.manager [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 926.899328] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 5.596s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.930395] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5221177f-c2c6-db12-fd83-6df4e29bb247, 'name': SearchDatastore_Task, 'duration_secs': 0.042417} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.930606] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.930742] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 906470fc-5fec-4c98-8a38-337361e12bc5/906470fc-5fec-4c98-8a38-337361e12bc5.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 926.931152] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e6a5b59-9032-49ff-a048-286b0bf3623c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.939607] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 926.939607] env[62923]: value = "task-1370164" [ 926.939607] env[62923]: _type = "Task" [ 926.939607] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.947914] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370164, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.988947] env[62923]: DEBUG nova.network.neutron [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Successfully updated port: 545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 927.172956] env[62923]: DEBUG nova.network.neutron [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 927.193960] env[62923]: DEBUG nova.objects.instance [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lazy-loading 'pci_requests' on Instance uuid 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.282734] env[62923]: DEBUG nova.compute.manager [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Received event network-vif-plugged-545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 927.282897] env[62923]: DEBUG oslo_concurrency.lockutils [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] Acquiring lock "75f9473f-ca67-4bb5-8663-0ce3709885e9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.283208] env[62923]: DEBUG oslo_concurrency.lockutils [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.283304] env[62923]: DEBUG oslo_concurrency.lockutils [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.283499] env[62923]: DEBUG nova.compute.manager [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] No waiting events found dispatching network-vif-plugged-545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 927.283676] env[62923]: WARNING nova.compute.manager [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Received unexpected event network-vif-plugged-545dfb40-7ae4-4d69-86f8-0d334ced67ff for instance with vm_state building and task_state spawning. 
[ 927.283842] env[62923]: DEBUG nova.compute.manager [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Received event network-changed-545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 927.283998] env[62923]: DEBUG nova.compute.manager [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Refreshing instance network info cache due to event network-changed-545dfb40-7ae4-4d69-86f8-0d334ced67ff. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 927.284197] env[62923]: DEBUG oslo_concurrency.lockutils [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] Acquiring lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.284338] env[62923]: DEBUG oslo_concurrency.lockutils [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] Acquired lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.284497] env[62923]: DEBUG nova.network.neutron [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Refreshing network info cache for port 545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 927.304952] env[62923]: DEBUG nova.network.neutron [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Updating instance_info_cache with network_info: [{"id": "24cd4887-ac99-48ba-bf0a-e5077d525b6c", "address": "fa:16:3e:67:f9:77", "network": {"id": "2beb4718-469b-47f0-94d1-7bc1c52e79db", "bridge": "br-int", "label": "tempest-ImagesTestJSON-615155455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1559d2844647aba922cae8e9d992e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24cd4887-ac", "ovs_interfaceid": "24cd4887-ac99-48ba-bf0a-e5077d525b6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.403865] env[62923]: DEBUG nova.compute.utils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Using /dev/sd instead of None {{(pid=62923) 
get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 927.408434] env[62923]: DEBUG nova.compute.manager [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 927.408434] env[62923]: DEBUG nova.network.neutron [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 927.452079] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370164, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.461557] env[62923]: DEBUG nova.policy [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d2829faa8f74da8a1432abd0c2434f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76d290a91b3b4d9491f755fd3d7e7894', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 927.490839] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.629424] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e8fe4f-1fb2-4dfe-a008-6b4348423a14 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.637391] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2085c189-d34c-48f9-ac81-13f556f07cb6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.668126] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7a09c9-bde2-477f-b936-84341dbf97ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.675842] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff1f445-8891-4279-8f86-1a6e4a8851eb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.690271] env[62923]: DEBUG nova.compute.provider_tree [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 
tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.696855] env[62923]: DEBUG nova.objects.base [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Object Instance<8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7> lazy-loaded attributes: flavor,pci_requests {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 927.697183] env[62923]: DEBUG nova.network.neutron [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 927.769955] env[62923]: DEBUG nova.policy [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c37debff078b4389813658cbad297e65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0db41047d1004a1d9ca7f663178058da', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 927.808182] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "refresh_cache-41cc788d-9be8-4959-9cef-d91304f5879d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.808550] env[62923]: DEBUG nova.compute.manager [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Instance network_info: |[{"id": "24cd4887-ac99-48ba-bf0a-e5077d525b6c", "address": "fa:16:3e:67:f9:77", "network": {"id": "2beb4718-469b-47f0-94d1-7bc1c52e79db", "bridge": "br-int", "label": "tempest-ImagesTestJSON-615155455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1559d2844647aba922cae8e9d992e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24cd4887-ac", "ovs_interfaceid": "24cd4887-ac99-48ba-bf0a-e5077d525b6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 927.808884] env[62923]: DEBUG oslo_concurrency.lockutils [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] Acquired lock "refresh_cache-41cc788d-9be8-4959-9cef-d91304f5879d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.809103] env[62923]: DEBUG nova.network.neutron [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Refreshing network info cache for port 24cd4887-ac99-48ba-bf0a-e5077d525b6c {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 927.810373] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:f9:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6eaa481-1f92-4851-b98e-09ed0daad7cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24cd4887-ac99-48ba-bf0a-e5077d525b6c', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 927.819626] env[62923]: DEBUG oslo.service.loopingcall [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 927.820223] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 927.820401] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ff9a477-8da1-44f1-ad32-129825c3bb80 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.837595] env[62923]: DEBUG nova.network.neutron [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Successfully created port: bed590d2-cf12-4135-a164-a61cade082eb {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 927.845936] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 927.845936] env[62923]: value = "task-1370165" [ 927.845936] env[62923]: _type = "Task" [ 927.845936] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.854157] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370165, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.869481] env[62923]: DEBUG nova.network.neutron [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 927.908954] env[62923]: DEBUG nova.compute.manager [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 927.956602] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.945278} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.956602] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 906470fc-5fec-4c98-8a38-337361e12bc5/906470fc-5fec-4c98-8a38-337361e12bc5.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 927.956602] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 927.956602] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3346c004-4643-465c-b6ca-aff6ffbf932f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.964044] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 927.964044] env[62923]: value = "task-1370166" [ 927.964044] env[62923]: _type = "Task" [ 927.964044] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.974679] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370166, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.063057] env[62923]: DEBUG nova.network.neutron [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.194965] env[62923]: DEBUG nova.scheduler.client.report [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 928.356817] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370165, 'name': CreateVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.474598] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370166, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070243} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.474884] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 928.475731] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a9a6a6-08da-4a7f-aafe-839fef98bb89 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.497140] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 906470fc-5fec-4c98-8a38-337361e12bc5/906470fc-5fec-4c98-8a38-337361e12bc5.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.499652] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b93c5e71-841e-44ad-895a-c5cae60cd02f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.520042] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 928.520042] env[62923]: value = "task-1370167" [ 928.520042] env[62923]: _type = "Task" [ 928.520042] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.528613] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370167, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.565400] env[62923]: DEBUG oslo_concurrency.lockutils [req-1e0112b7-5850-423e-a2d0-89953daba93f req-9e4fe0e3-0042-46f3-a585-9bad67c45cd6 service nova] Releasing lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.565855] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.566042] env[62923]: DEBUG nova.network.neutron [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 928.615602] env[62923]: DEBUG nova.network.neutron [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Updated VIF entry in instance network info cache for port 24cd4887-ac99-48ba-bf0a-e5077d525b6c. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 928.615949] env[62923]: DEBUG nova.network.neutron [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Updating instance_info_cache with network_info: [{"id": "24cd4887-ac99-48ba-bf0a-e5077d525b6c", "address": "fa:16:3e:67:f9:77", "network": {"id": "2beb4718-469b-47f0-94d1-7bc1c52e79db", "bridge": "br-int", "label": "tempest-ImagesTestJSON-615155455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1559d2844647aba922cae8e9d992e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24cd4887-ac", "ovs_interfaceid": "24cd4887-ac99-48ba-bf0a-e5077d525b6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.856334] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370165, 'name': CreateVM_Task, 'duration_secs': 0.576828} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.856525] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 928.857268] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/82a06d7c-e957-4bd3-97f2-0322af9583a1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.857387] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "[datastore2] devstack-image-cache_base/82a06d7c-e957-4bd3-97f2-0322af9583a1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.857806] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/82a06d7c-e957-4bd3-97f2-0322af9583a1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 928.858087] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa514f94-f9c8-450d-96f8-b77c3633d3f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.862637] env[62923]: DEBUG 
oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 928.862637] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d21692-a8ac-f284-afcb-8c7908607c0a" [ 928.862637] env[62923]: _type = "Task" [ 928.862637] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.871947] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d21692-a8ac-f284-afcb-8c7908607c0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.918489] env[62923]: DEBUG nova.compute.manager [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 928.943663] env[62923]: DEBUG nova.virt.hardware [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 928.943960] env[62923]: DEBUG nova.virt.hardware [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 928.944173] env[62923]: DEBUG nova.virt.hardware [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 928.944366] env[62923]: DEBUG nova.virt.hardware [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 928.944529] env[62923]: DEBUG nova.virt.hardware [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 928.944684] env[62923]: DEBUG nova.virt.hardware [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 928.944893] env[62923]: DEBUG nova.virt.hardware [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 928.945065] env[62923]: DEBUG nova.virt.hardware [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 928.945237] env[62923]: DEBUG nova.virt.hardware [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 928.945402] env[62923]: DEBUG nova.virt.hardware [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 928.945633] env[62923]: DEBUG nova.virt.hardware [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 928.946497] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2135900a-c059-4b30-856f-6f64e2a0162a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.954180] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4025e0c-54bf-4dec-9f6b-6d8e4fd73fe8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.031485] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370167, 'name': ReconfigVM_Task, 'duration_secs': 0.405461} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.031781] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 906470fc-5fec-4c98-8a38-337361e12bc5/906470fc-5fec-4c98-8a38-337361e12bc5.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.032428] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58e67b5a-2139-4dd2-b8b6-65724a4d43d1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.038449] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 929.038449] env[62923]: value = "task-1370168" [ 929.038449] env[62923]: _type = "Task" [ 929.038449] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.046466] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370168, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.097979] env[62923]: DEBUG nova.network.neutron [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 929.118881] env[62923]: DEBUG oslo_concurrency.lockutils [req-daa7cf30-da79-4185-a889-323b6cff89f7 req-7fd40886-0233-4279-a1bf-770e73ef6fd7 service nova] Releasing lock "refresh_cache-41cc788d-9be8-4959-9cef-d91304f5879d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.204778] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.305s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.212019] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.571s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.212019] env[62923]: DEBUG nova.objects.instance [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lazy-loading 'resources' on Instance uuid 3f60e93d-15ae-4fe4-ba86-6b6b123b645c {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.289147] env[62923]: DEBUG nova.network.neutron [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Successfully updated port: 1353213d-e2e0-4537-a849-37be48c686ac {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 929.359589] env[62923]: DEBUG nova.network.neutron [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updating instance_info_cache with network_info: [{"id": "545dfb40-7ae4-4d69-86f8-0d334ced67ff", "address": "fa:16:3e:18:f5:c7", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap545dfb40-7a", "ovs_interfaceid": "545dfb40-7ae4-4d69-86f8-0d334ced67ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.373920] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "[datastore2] devstack-image-cache_base/82a06d7c-e957-4bd3-97f2-0322af9583a1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.373920] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Processing image 82a06d7c-e957-4bd3-97f2-0322af9583a1 {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 929.373920] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/82a06d7c-e957-4bd3-97f2-0322af9583a1/82a06d7c-e957-4bd3-97f2-0322af9583a1.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.373920] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "[datastore2] devstack-image-cache_base/82a06d7c-e957-4bd3-97f2-0322af9583a1/82a06d7c-e957-4bd3-97f2-0322af9583a1.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.374301] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 929.374585] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-568c2a91-1f3b-4e63-b45e-3663d383c20d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.393930] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 929.394142] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 929.394903] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8daf1fe4-51ef-4215-b069-dd25c626423a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.399897] env[62923]: DEBUG nova.compute.manager [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Received event network-vif-plugged-1353213d-e2e0-4537-a849-37be48c686ac {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.400113] env[62923]: DEBUG oslo_concurrency.lockutils [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] Acquiring lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.400332] env[62923]: DEBUG oslo_concurrency.lockutils [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] Lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.400511] env[62923]: DEBUG oslo_concurrency.lockutils [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] Lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.400685] env[62923]: DEBUG nova.compute.manager [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] No waiting events found dispatching network-vif-plugged-1353213d-e2e0-4537-a849-37be48c686ac {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 929.400798] env[62923]: WARNING nova.compute.manager [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Received unexpected event network-vif-plugged-1353213d-e2e0-4537-a849-37be48c686ac for instance with vm_state active and task_state None. [ 929.401676] env[62923]: DEBUG nova.compute.manager [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Received event network-changed-1353213d-e2e0-4537-a849-37be48c686ac {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.401676] env[62923]: DEBUG nova.compute.manager [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Refreshing instance network info cache due to event network-changed-1353213d-e2e0-4537-a849-37be48c686ac. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 929.401676] env[62923]: DEBUG oslo_concurrency.lockutils [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] Acquiring lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.401676] env[62923]: DEBUG oslo_concurrency.lockutils [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] Acquired lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.401676] env[62923]: DEBUG nova.network.neutron [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Refreshing network info cache for port 1353213d-e2e0-4537-a849-37be48c686ac {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 929.404920] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 929.404920] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f03875-2087-9a58-a40e-5f11f5e43201" [ 929.404920] env[62923]: _type = "Task" [ 929.404920] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.413328] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f03875-2087-9a58-a40e-5f11f5e43201, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.548417] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370168, 'name': Rename_Task, 'duration_secs': 0.158173} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.548637] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 929.548886] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-445a4a2f-17d0-422d-aa68-4b770ff6590a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.555096] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 929.555096] env[62923]: value = "task-1370169" [ 929.555096] env[62923]: _type = "Task" [ 929.555096] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.562365] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370169, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.779973] env[62923]: INFO nova.scheduler.client.report [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Deleted allocation for migration 1e89a30e-5033-486f-a50a-1bde86bc2874 [ 929.791625] env[62923]: DEBUG oslo_concurrency.lockutils [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.853176] env[62923]: DEBUG nova.network.neutron [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Successfully updated port: bed590d2-cf12-4135-a164-a61cade082eb {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 929.862356] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.862728] env[62923]: DEBUG nova.compute.manager [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Instance network_info: |[{"id": "545dfb40-7ae4-4d69-86f8-0d334ced67ff", "address": "fa:16:3e:18:f5:c7", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap545dfb40-7a", "ovs_interfaceid": "545dfb40-7ae4-4d69-86f8-0d334ced67ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 929.863149] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 
tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:f5:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '545dfb40-7ae4-4d69-86f8-0d334ced67ff', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 929.870638] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating folder: Project (418b805157a74173b5cfe13ea5b61c13). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 929.873742] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27f9581b-170b-4587-bd39-ee0da708753b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.886228] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Created folder: Project (418b805157a74173b5cfe13ea5b61c13) in parent group-v291405. [ 929.886228] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating folder: Instances. Parent ref: group-v291496. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 929.886228] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8286b9d-276d-44eb-ab93-f8ed276f72c5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.898235] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Created folder: Instances in parent group-v291496. [ 929.898522] env[62923]: DEBUG oslo.service.loopingcall [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 929.898709] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 929.898923] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b32023df-854f-4f19-aea6-5d98dcfe006b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.920796] env[62923]: DEBUG nova.compute.manager [req-0f14aeef-b130-4d16-8737-239ed0785f07 req-eab5697e-b0ab-49a4-965b-3d733fa1f9d6 service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Received event network-vif-plugged-bed590d2-cf12-4135-a164-a61cade082eb {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.921073] env[62923]: DEBUG oslo_concurrency.lockutils [req-0f14aeef-b130-4d16-8737-239ed0785f07 req-eab5697e-b0ab-49a4-965b-3d733fa1f9d6 service nova] Acquiring lock "b145b71c-c56b-4872-bb61-fa3e65fef04f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.921226] env[62923]: DEBUG oslo_concurrency.lockutils [req-0f14aeef-b130-4d16-8737-239ed0785f07 req-eab5697e-b0ab-49a4-965b-3d733fa1f9d6 service nova] Lock "b145b71c-c56b-4872-bb61-fa3e65fef04f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.921403] env[62923]: DEBUG oslo_concurrency.lockutils [req-0f14aeef-b130-4d16-8737-239ed0785f07 req-eab5697e-b0ab-49a4-965b-3d733fa1f9d6 service nova] Lock "b145b71c-c56b-4872-bb61-fa3e65fef04f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.921570] env[62923]: DEBUG nova.compute.manager [req-0f14aeef-b130-4d16-8737-239ed0785f07 req-eab5697e-b0ab-49a4-965b-3d733fa1f9d6 service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] No waiting events found dispatching network-vif-plugged-bed590d2-cf12-4135-a164-a61cade082eb {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 929.921750] env[62923]: WARNING nova.compute.manager [req-0f14aeef-b130-4d16-8737-239ed0785f07 req-eab5697e-b0ab-49a4-965b-3d733fa1f9d6 service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Received unexpected event network-vif-plugged-bed590d2-cf12-4135-a164-a61cade082eb for instance with vm_state building and task_state spawning. [ 929.930182] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 929.930182] env[62923]: value = "task-1370172" [ 929.930182] env[62923]: _type = "Task" [ 929.930182] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.936802] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Preparing fetch location {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 929.937073] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Fetch image to [datastore2] OSTACK_IMG_d647e1b9-7d8d-43c0-af05-89fbfff34ad4/OSTACK_IMG_d647e1b9-7d8d-43c0-af05-89fbfff34ad4.vmdk {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 929.937259] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Downloading stream optimized image 82a06d7c-e957-4bd3-97f2-0322af9583a1 to [datastore2] OSTACK_IMG_d647e1b9-7d8d-43c0-af05-89fbfff34ad4/OSTACK_IMG_d647e1b9-7d8d-43c0-af05-89fbfff34ad4.vmdk on the data store datastore2 as vApp {{(pid=62923) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 929.937431] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Downloading image file data 82a06d7c-e957-4bd3-97f2-0322af9583a1 to the ESX as VM named 'OSTACK_IMG_d647e1b9-7d8d-43c0-af05-89fbfff34ad4' {{(pid=62923) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 929.946927] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370172, 'name': CreateVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.017997] env[62923]: DEBUG oslo_vmware.rw_handles [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 930.017997] env[62923]: value = "resgroup-9" [ 930.017997] env[62923]: _type = "ResourcePool" [ 930.017997] env[62923]: }. {{(pid=62923) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 930.018640] env[62923]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b95c279a-0171-4398-9c46-19f903443a28 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.043801] env[62923]: DEBUG oslo_vmware.rw_handles [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lease: (returnval){ [ 930.043801] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cdb5d2-a589-5413-5e88-8f23500185d6" [ 930.043801] env[62923]: _type = "HttpNfcLease" [ 930.043801] env[62923]: } obtained for vApp import into resource pool (val){ [ 930.043801] env[62923]: value = "resgroup-9" [ 930.043801] env[62923]: _type = "ResourcePool" [ 930.043801] env[62923]: }. 
{{(pid=62923) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 930.044101] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the lease: (returnval){ [ 930.044101] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cdb5d2-a589-5413-5e88-8f23500185d6" [ 930.044101] env[62923]: _type = "HttpNfcLease" [ 930.044101] env[62923]: } to be ready. {{(pid=62923) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 930.048910] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd7033e-2610-4f9f-8bbb-f3c5ffc92499 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.054805] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 930.054805] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cdb5d2-a589-5413-5e88-8f23500185d6" [ 930.054805] env[62923]: _type = "HttpNfcLease" [ 930.054805] env[62923]: } is initializing. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 930.062039] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95983a31-3dac-457e-9e56-5c0de029d17e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.074372] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370169, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.106027] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f345b100-9b57-4338-9198-6075e6084f37 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.110492] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf93058-b298-4138-be02-9a76f064ac2b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.124405] env[62923]: DEBUG nova.compute.provider_tree [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.256838] env[62923]: DEBUG nova.network.neutron [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Added VIF to instance network info cache for port 1353213d-e2e0-4537-a849-37be48c686ac. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 930.257426] env[62923]: DEBUG nova.network.neutron [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updating instance_info_cache with network_info: [{"id": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "address": "fa:16:3e:59:5e:e2", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcea0b4ee-b6", "ovs_interfaceid": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1353213d-e2e0-4537-a849-37be48c686ac", "address": "fa:16:3e:f6:2a:9f", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1353213d-e2", "ovs_interfaceid": "1353213d-e2e0-4537-a849-37be48c686ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.285079] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3daca5f-d9dd-4a4b-9c55-d8ec7949b3e3 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "066da19f-daf0-44e3-8ae0-89f0c970cb92" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 12.801s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.358259] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock 
"refresh_cache-b145b71c-c56b-4872-bb61-fa3e65fef04f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.358416] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired lock "refresh_cache-b145b71c-c56b-4872-bb61-fa3e65fef04f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.358641] env[62923]: DEBUG nova.network.neutron [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 930.443779] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370172, 'name': CreateVM_Task, 'duration_secs': 0.346813} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.443946] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 930.444650] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.444821] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.445166] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 930.445423] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-993ccc74-5524-467f-80a2-322fe87e34fc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.449960] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 930.449960] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52123566-b4c2-d1c3-f8c1-e01d9d293965" [ 930.449960] env[62923]: _type = "Task" [ 930.449960] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.458753] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52123566-b4c2-d1c3-f8c1-e01d9d293965, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.555036] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 930.555036] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cdb5d2-a589-5413-5e88-8f23500185d6" [ 930.555036] env[62923]: _type = "HttpNfcLease" [ 930.555036] env[62923]: } is initializing. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 930.565513] env[62923]: DEBUG oslo_vmware.api [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370169, 'name': PowerOnVM_Task, 'duration_secs': 0.870144} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.565848] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 930.566071] env[62923]: INFO nova.compute.manager [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Took 8.73 seconds to spawn the instance on the hypervisor. 
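The PowerOnVM_Task entries above illustrate the polling loop behind every wait_for_task/_poll_task pair in this log: the driver submits a VIM task, then repeatedly reads the task's info property, emitting "progress is N%." records until vCenter reports success or error. A minimal sketch of that loop follows; get_task_info() is a hypothetical stand-in for oslo.vmware's property-collector read of the Task object and is not the real oslo_vmware API.

    # Minimal sketch of the wait_for_task/_poll_task pattern seen above.
    # session.get_task_info() is a hypothetical helper standing in for
    # oslo.vmware's property-collector read of Task.info -- NOT a real
    # oslo_vmware call signature.
    import time

    def wait_for_task(session, task_ref, poll_interval=0.5):
        """Poll a vCenter task until it finishes, mirroring the
        'Task: {...} progress is N%.' entries in the log."""
        while True:
            info = session.get_task_info(task_ref)  # hypothetical helper
            if info.state == 'running':
                print(f"Task: {task_ref} progress is {info.progress}%.")
            elif info.state == 'success':
                print(f"Task: {task_ref} completed successfully.")
                return info.result
            elif info.state == 'error':
                # oslo.vmware raises a translated fault exception here
                raise RuntimeError(info.error.localizedMessage)
            time.sleep(poll_interval)

In the real library this loop is driven on a configurable task poll interval, which is why each long-running task (CreateVM_Task, PowerOnVM_Task, CopyVirtualDisk_Task) produces a stream of intermediate progress records before its final "completed successfully" entry with duration_secs attached.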
[ 930.566258] env[62923]: DEBUG nova.compute.manager [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 930.567012] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6725169-c885-4870-b9bb-12c8bcf5d707 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.628313] env[62923]: DEBUG nova.scheduler.client.report [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 930.760626] env[62923]: DEBUG oslo_concurrency.lockutils [req-2c5d5ce2-4db8-4619-bd30-55df98c27869 req-f23804e6-6f36-4df2-bd28-43a59bd1ad82 service nova] Releasing lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.761176] env[62923]: DEBUG oslo_concurrency.lockutils [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.761410] env[62923]: DEBUG nova.network.neutron [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 930.922914] env[62923]: DEBUG nova.network.neutron [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.962310] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52123566-b4c2-d1c3-f8c1-e01d9d293965, 'name': SearchDatastore_Task, 'duration_secs': 0.011375} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.962628] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.962873] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 930.963152] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.963413] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.963627] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 930.963920] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9535e662-962c-4457-b21f-27218705fded {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.971777] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 930.971963] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 930.972931] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daae8679-15ac-4a8e-bded-1c6dd784db4c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.977649] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 930.977649] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ec9bcd-d1bd-728c-fe9b-c9c3bbb7dac7" [ 930.977649] env[62923]: _type = "Task" [ 930.977649] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.985891] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ec9bcd-d1bd-728c-fe9b-c9c3bbb7dac7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.052799] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 931.052799] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cdb5d2-a589-5413-5e88-8f23500185d6" [ 931.052799] env[62923]: _type = "HttpNfcLease" [ 931.052799] env[62923]: } is initializing. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 931.084188] env[62923]: INFO nova.compute.manager [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Took 18.93 seconds to build instance. 
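The ImagesTestJSON entries in this section trace the stream-optimized image import end to end: prepare the fetch location, invoke ResourcePool.ImportVApp to obtain an HttpNfcLease, poll the lease out of "initializing", read the per-disk VMDK upload URL from the lease info, stream the image bytes over HTTP while ticking HttpNfcLeaseProgress, and finally close the lease with HttpNfcLeaseComplete. Below is a compressed sketch of that flow; every session/lease helper in it is a hypothetical stand-in for the oslo_vmware.rw_handles plumbing, not a real call signature.

    # Compressed sketch of the HttpNfcLease import flow traced in this log
    # (see rw_handles.py references above). All session.* helpers are
    # hypothetical stand-ins, not real oslo_vmware APIs.
    import time

    def import_vapp_image(session, resource_pool, import_spec, image_iter, size):
        # "Creating HttpNfcLease lease for vApp import into resource pool"
        lease = session.import_vapp(resource_pool, import_spec)

        # "Waiting for the lease ... to be ready": vCenter first reports
        # the lease as initializing while it provisions destination disks.
        while session.lease_state(lease) == 'initializing':
            time.sleep(0.5)

        # "Found VMDK URL ... from lease info": the ready lease exposes
        # one HTTP upload URL per destination disk.
        url = session.vmdk_upload_url(lease)

        # "Creating HTTP connection to write to file with size = ... and
        # URL = ...": stream the image bytes to the ESX host, reporting
        # HttpNfcLeaseProgress so vCenter does not expire the lease.
        written = 0
        conn = session.open_write_connection(url, size)
        for chunk in image_iter:
            conn.write(chunk)
            written += len(chunk)
            session.lease_progress(lease, written * 100 // size)

        # "Releasing lease": HttpNfcLeaseComplete marks the import done
        # and the uploaded VMDK becomes the cached image on the datastore.
        session.lease_complete(lease)

The "Completed reading data from the image iterator" and "Lease ... is in state: ready" records later in this section correspond to the final two steps of this sketch.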
[ 931.133044] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.924s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.136716] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.260s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.138688] env[62923]: INFO nova.compute.claims [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 931.161698] env[62923]: INFO nova.scheduler.client.report [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleted allocations for instance 3f60e93d-15ae-4fe4-ba86-6b6b123b645c [ 931.164121] env[62923]: DEBUG nova.network.neutron [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Updating instance_info_cache with network_info: [{"id": "bed590d2-cf12-4135-a164-a61cade082eb", "address": "fa:16:3e:84:e6:01", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed590d2-cf", "ovs_interfaceid": "bed590d2-cf12-4135-a164-a61cade082eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.310399] env[62923]: WARNING nova.network.neutron [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] dc0481d3-aa80-48c6-bea8-294b2d1f77ec already exists in list: networks containing: ['dc0481d3-aa80-48c6-bea8-294b2d1f77ec']. 
ignoring it [ 931.310618] env[62923]: WARNING nova.network.neutron [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] dc0481d3-aa80-48c6-bea8-294b2d1f77ec already exists in list: networks containing: ['dc0481d3-aa80-48c6-bea8-294b2d1f77ec']. ignoring it [ 931.310790] env[62923]: WARNING nova.network.neutron [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] 1353213d-e2e0-4537-a849-37be48c686ac already exists in list: port_ids containing: ['1353213d-e2e0-4537-a849-37be48c686ac']. ignoring it [ 931.488832] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ec9bcd-d1bd-728c-fe9b-c9c3bbb7dac7, 'name': SearchDatastore_Task, 'duration_secs': 0.012315} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.489689] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-993dc6c2-5767-44e7-8d62-51e570dc0856 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.495187] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 931.495187] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]523ab12c-5e82-9b1d-2220-abe12ebcae2d" [ 931.495187] env[62923]: _type = "Task" [ 931.495187] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.503816] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]523ab12c-5e82-9b1d-2220-abe12ebcae2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.553519] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 931.553519] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cdb5d2-a589-5413-5e88-8f23500185d6" [ 931.553519] env[62923]: _type = "HttpNfcLease" [ 931.553519] env[62923]: } is ready. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 931.553831] env[62923]: DEBUG oslo_vmware.rw_handles [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 931.553831] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cdb5d2-a589-5413-5e88-8f23500185d6" [ 931.553831] env[62923]: _type = "HttpNfcLease" [ 931.553831] env[62923]: }. 
{{(pid=62923) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 931.554519] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d6599a-8d9e-428a-a802-cfe8b047d454 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.561912] env[62923]: DEBUG oslo_vmware.rw_handles [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52634b40-6b26-84cf-cb0c-dcb76e4ae744/disk-0.vmdk from lease info. {{(pid=62923) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 931.562145] env[62923]: DEBUG oslo_vmware.rw_handles [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52634b40-6b26-84cf-cb0c-dcb76e4ae744/disk-0.vmdk. {{(pid=62923) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 931.623557] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8004fe6a-abf6-4d80-b202-63ffe9ccafc7 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "906470fc-5fec-4c98-8a38-337361e12bc5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.485s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.635088] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-238f53a1-5182-470d-9b20-c2a5ac010c5a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.669201] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Releasing lock "refresh_cache-b145b71c-c56b-4872-bb61-fa3e65fef04f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.669512] env[62923]: DEBUG nova.compute.manager [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Instance network_info: |[{"id": "bed590d2-cf12-4135-a164-a61cade082eb", "address": "fa:16:3e:84:e6:01", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": 
"nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed590d2-cf", "ovs_interfaceid": "bed590d2-cf12-4135-a164-a61cade082eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 931.669929] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:e6:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a78d5760-0bb1-4476-9578-8ad3c3144439', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bed590d2-cf12-4135-a164-a61cade082eb', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 931.677672] env[62923]: DEBUG oslo.service.loopingcall [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 931.680435] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 931.680890] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44596753-3d6b-49df-95f3-78ae33cd0f3e tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "3f60e93d-15ae-4fe4-ba86-6b6b123b645c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.516s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.681728] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-369a7b42-ce62-4b64-9ca2-9f9ca6a235f6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.705643] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 931.705643] env[62923]: value = "task-1370174" [ 931.705643] env[62923]: _type = "Task" [ 931.705643] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.713222] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370174, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.734552] env[62923]: DEBUG nova.network.neutron [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updating instance_info_cache with network_info: [{"id": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "address": "fa:16:3e:59:5e:e2", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcea0b4ee-b6", "ovs_interfaceid": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1353213d-e2e0-4537-a849-37be48c686ac", "address": "fa:16:3e:f6:2a:9f", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1353213d-e2", "ovs_interfaceid": "1353213d-e2e0-4537-a849-37be48c686ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.933316] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "066da19f-daf0-44e3-8ae0-89f0c970cb92" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.933714] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 
tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "066da19f-daf0-44e3-8ae0-89f0c970cb92" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.934055] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "066da19f-daf0-44e3-8ae0-89f0c970cb92-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.934380] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "066da19f-daf0-44e3-8ae0-89f0c970cb92-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.934900] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "066da19f-daf0-44e3-8ae0-89f0c970cb92-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.938629] env[62923]: INFO nova.compute.manager [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Terminating instance [ 931.941572] env[62923]: DEBUG nova.compute.manager [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 931.941680] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 931.942837] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081a382b-8dd8-4583-939d-25d69af1c2f6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.952752] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 931.954442] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a99f8a13-55e6-447c-83de-db52f8ff4db8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.962653] env[62923]: DEBUG oslo_vmware.api [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 931.962653] env[62923]: value = "task-1370175" [ 931.962653] env[62923]: _type = "Task" [ 931.962653] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.971110] env[62923]: DEBUG oslo_vmware.api [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370175, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.005836] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]523ab12c-5e82-9b1d-2220-abe12ebcae2d, 'name': SearchDatastore_Task, 'duration_secs': 0.01459} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.005836] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.005968] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 75f9473f-ca67-4bb5-8663-0ce3709885e9/75f9473f-ca67-4bb5-8663-0ce3709885e9.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 932.006665] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17a6c3df-2c60-4565-b7d8-8cfa41b9754e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.014761] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 932.014761] env[62923]: value = "task-1370176" [ 932.014761] env[62923]: _type = "Task" [ 932.014761] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.024796] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370176, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.216932] env[62923]: DEBUG nova.compute.manager [req-818d3305-3116-43f2-a71e-f531636195de req-992ad3a6-d7c1-4337-8882-11f0d3d24edb service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Received event network-changed-bed590d2-cf12-4135-a164-a61cade082eb {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 932.217153] env[62923]: DEBUG nova.compute.manager [req-818d3305-3116-43f2-a71e-f531636195de req-992ad3a6-d7c1-4337-8882-11f0d3d24edb service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Refreshing instance network info cache due to event network-changed-bed590d2-cf12-4135-a164-a61cade082eb. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 932.217413] env[62923]: DEBUG oslo_concurrency.lockutils [req-818d3305-3116-43f2-a71e-f531636195de req-992ad3a6-d7c1-4337-8882-11f0d3d24edb service nova] Acquiring lock "refresh_cache-b145b71c-c56b-4872-bb61-fa3e65fef04f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.217562] env[62923]: DEBUG oslo_concurrency.lockutils [req-818d3305-3116-43f2-a71e-f531636195de req-992ad3a6-d7c1-4337-8882-11f0d3d24edb service nova] Acquired lock "refresh_cache-b145b71c-c56b-4872-bb61-fa3e65fef04f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.217729] env[62923]: DEBUG nova.network.neutron [req-818d3305-3116-43f2-a71e-f531636195de req-992ad3a6-d7c1-4337-8882-11f0d3d24edb service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Refreshing network info cache for port bed590d2-cf12-4135-a164-a61cade082eb {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 932.227876] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370174, 'name': CreateVM_Task, 'duration_secs': 0.344405} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.229695] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 932.232888] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.233072] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.233409] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 932.234077] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f87e5bb-17ea-4ca1-b17d-39139f2f8303 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.237728] env[62923]: DEBUG oslo_concurrency.lockutils [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.238343] env[62923]: DEBUG oslo_concurrency.lockutils [None 
req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.238618] env[62923]: DEBUG oslo_concurrency.lockutils [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.239805] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ba3b18-582c-42c4-a846-5c8aba490384 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.250445] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 932.250445] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d50358-4491-521d-0d16-0d348afbc5c4" [ 932.250445] env[62923]: _type = "Task" [ 932.250445] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.269726] env[62923]: DEBUG nova.virt.hardware [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 932.269977] env[62923]: DEBUG nova.virt.hardware [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 932.270152] env[62923]: DEBUG nova.virt.hardware [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.270341] env[62923]: DEBUG nova.virt.hardware [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 932.270492] env[62923]: DEBUG nova.virt.hardware [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 932.270866] env[62923]: DEBUG nova.virt.hardware [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 932.270866] env[62923]: DEBUG nova.virt.hardware [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 932.270989] env[62923]: DEBUG nova.virt.hardware [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 932.271168] env[62923]: DEBUG nova.virt.hardware [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 932.271784] env[62923]: DEBUG nova.virt.hardware [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 932.271784] env[62923]: DEBUG nova.virt.hardware [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 932.278300] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Reconfiguring VM to attach interface {{(pid=62923) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 932.287689] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-906beafe-a310-41ac-b78b-9422e6adbe30 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.306504] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d50358-4491-521d-0d16-0d348afbc5c4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 932.311734] env[62923]: DEBUG oslo_vmware.api [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){
[ 932.311734] env[62923]: value = "task-1370177"
[ 932.311734] env[62923]: _type = "Task"
[ 932.311734] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 932.323755] env[62923]: DEBUG oslo_vmware.api [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370177, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 932.449020] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d57904c1-210a-4b48-bcf8-957a878c44ab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.456254] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a3be2d-854e-4965-8eeb-42a5fb24c63a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.497781] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d067b95a-af68-4ee1-af8b-3fff4fbb25ba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.514817] env[62923]: DEBUG oslo_vmware.api [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370175, 'name': PowerOffVM_Task, 'duration_secs': 0.30742} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 932.517922] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 932.518565] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 932.523184] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d461c08f-52c6-4d6e-ab06-56018dde3530 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.526534] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223a582a-7f0c-4e64-b941-95a96953c819 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.542157] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370176, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 932.551535] env[62923]: DEBUG nova.compute.provider_tree [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 932.556372] env[62923]: DEBUG oslo_vmware.rw_handles [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Completed reading data from the image iterator. {{(pid=62923) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 932.556372] env[62923]: DEBUG oslo_vmware.rw_handles [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52634b40-6b26-84cf-cb0c-dcb76e4ae744/disk-0.vmdk. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}}
[ 932.557421] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1070a15f-26de-4420-b0e7-b4e763d1d871 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.565812] env[62923]: DEBUG oslo_vmware.rw_handles [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52634b40-6b26-84cf-cb0c-dcb76e4ae744/disk-0.vmdk is in state: ready. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}}
[ 932.566026] env[62923]: DEBUG oslo_vmware.rw_handles [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52634b40-6b26-84cf-cb0c-dcb76e4ae744/disk-0.vmdk. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}}
[ 932.566326] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-f7564640-a3e8-426b-9caf-9e2af58a060d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.604731] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 932.604731] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 932.604731] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Deleting the datastore file [datastore1] 066da19f-daf0-44e3-8ae0-89f0c970cb92 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 932.606640] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9a6828d-d68e-47f9-b833-75afac2a2f69 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.612206] env[62923]: DEBUG oslo_vmware.api [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){
[ 932.612206] env[62923]: value = "task-1370179"
[ 932.612206] env[62923]: _type = "Task"
[ 932.612206] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 932.619858] env[62923]: DEBUG oslo_vmware.api [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370179, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 932.623836] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "2a9a93f8-9398-4a19-a149-a1092ceb416d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 932.624082] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "2a9a93f8-9398-4a19-a149-a1092ceb416d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 932.762857] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d50358-4491-521d-0d16-0d348afbc5c4, 'name': SearchDatastore_Task, 'duration_secs': 0.065836} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 932.765417] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 932.765691] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 932.765949] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 932.766139] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 932.766352] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 932.766987] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9cd6af75-0bb2-472c-a58b-51475141cb26 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.776994] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 932.777203] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 932.777971] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa3d5d5b-c7e4-4d3a-8904-508cac83cad6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.783148] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){
[ 932.783148] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b9db1d-7af5-32ba-c194-84055b79d4e6"
[ 932.783148] env[62923]: _type = "Task"
[ 932.783148] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 932.791092] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b9db1d-7af5-32ba-c194-84055b79d4e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 932.823124] env[62923]: DEBUG oslo_vmware.api [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370177, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 932.853130] env[62923]: DEBUG oslo_vmware.rw_handles [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52634b40-6b26-84cf-cb0c-dcb76e4ae744/disk-0.vmdk. {{(pid=62923) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}}
[ 932.854419] env[62923]: INFO nova.virt.vmwareapi.images [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Downloaded image file data 82a06d7c-e957-4bd3-97f2-0322af9583a1
[ 932.854419] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75573f77-7bae-432e-9aaf-c2a41d2e678d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.870309] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac552fa3-65db-4030-8584-d475603b6bd2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.956899] env[62923]: INFO nova.virt.vmwareapi.images [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] The imported VM was unregistered
[ 932.959463] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Caching image {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 932.959680] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Creating directory with path [datastore2] devstack-image-cache_base/82a06d7c-e957-4bd3-97f2-0322af9583a1 {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 932.959953] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-962ecce3-5d4d-438d-a954-7208378c4c85 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.979751] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Created directory with path [datastore2] devstack-image-cache_base/82a06d7c-e957-4bd3-97f2-0322af9583a1 {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 932.980096] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_d647e1b9-7d8d-43c0-af05-89fbfff34ad4/OSTACK_IMG_d647e1b9-7d8d-43c0-af05-89fbfff34ad4.vmdk to [datastore2] devstack-image-cache_base/82a06d7c-e957-4bd3-97f2-0322af9583a1/82a06d7c-e957-4bd3-97f2-0322af9583a1.vmdk. {{(pid=62923) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}}
[ 932.980543] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-1b24c598-a49f-4bf8-9c83-14603f308821 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 932.989290] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){
[ 932.989290] env[62923]: value = "task-1370181"
[ 932.989290] env[62923]: _type = "Task"
[ 932.989290] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 932.999078] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370181, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 933.006799] env[62923]: DEBUG oslo_concurrency.lockutils [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "906470fc-5fec-4c98-8a38-337361e12bc5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 933.007076] env[62923]: DEBUG oslo_concurrency.lockutils [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "906470fc-5fec-4c98-8a38-337361e12bc5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 933.007293] env[62923]: DEBUG oslo_concurrency.lockutils [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "906470fc-5fec-4c98-8a38-337361e12bc5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 933.007481] env[62923]: DEBUG oslo_concurrency.lockutils [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "906470fc-5fec-4c98-8a38-337361e12bc5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 933.007770] env[62923]: DEBUG oslo_concurrency.lockutils [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "906470fc-5fec-4c98-8a38-337361e12bc5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 933.009892] env[62923]: INFO nova.compute.manager [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Terminating instance
[ 933.011718] env[62923]: DEBUG nova.compute.manager [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 933.011906] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 933.012815] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d243a81-4fc8-42f9-b53c-55a9af9f69fa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.026087] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370176, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577442} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 933.026980] env[62923]: DEBUG nova.network.neutron [req-818d3305-3116-43f2-a71e-f531636195de req-992ad3a6-d7c1-4337-8882-11f0d3d24edb service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Updated VIF entry in instance network info cache for port bed590d2-cf12-4135-a164-a61cade082eb. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 933.027312] env[62923]: DEBUG nova.network.neutron [req-818d3305-3116-43f2-a71e-f531636195de req-992ad3a6-d7c1-4337-8882-11f0d3d24edb service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Updating instance_info_cache with network_info: [{"id": "bed590d2-cf12-4135-a164-a61cade082eb", "address": "fa:16:3e:84:e6:01", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed590d2-cf", "ovs_interfaceid": "bed590d2-cf12-4135-a164-a61cade082eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 933.030404] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 75f9473f-ca67-4bb5-8663-0ce3709885e9/75f9473f-ca67-4bb5-8663-0ce3709885e9.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 933.030635] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 933.030932] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 933.031384] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dc4bd852-bef9-4036-b403-5e981070aeb7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.033838] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7647570a-983f-4e05-954f-31e12a9807c0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.040305] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){
[ 933.040305] env[62923]: value = "task-1370182"
[ 933.040305] env[62923]: _type = "Task"
[ 933.040305] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 933.041649] env[62923]: DEBUG oslo_vmware.api [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){
[ 933.041649] env[62923]: value = "task-1370183"
[ 933.041649] env[62923]: _type = "Task"
[ 933.041649] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 933.054990] env[62923]: DEBUG nova.scheduler.client.report [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 933.058758] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370182, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 933.062721] env[62923]: DEBUG oslo_vmware.api [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370183, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 933.123027] env[62923]: DEBUG oslo_vmware.api [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370179, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35106} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 933.123263] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 933.123543] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 933.123723] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 933.123848] env[62923]: INFO nova.compute.manager [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Took 1.18 seconds to destroy the instance on the hypervisor.
[ 933.124122] env[62923]: DEBUG oslo.service.loopingcall [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 933.124328] env[62923]: DEBUG nova.compute.manager [-] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 933.124419] env[62923]: DEBUG nova.network.neutron [-] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 933.126697] env[62923]: DEBUG nova.compute.manager [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 933.295139] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b9db1d-7af5-32ba-c194-84055b79d4e6, 'name': SearchDatastore_Task, 'duration_secs': 0.013821} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 933.296221] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcb82d9e-7475-4108-b33f-5870483028cf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.302427] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){
[ 933.302427] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]526dbb40-48bb-25b3-cf30-38be652a4cbc"
[ 933.302427] env[62923]: _type = "Task"
[ 933.302427] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 933.312638] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]526dbb40-48bb-25b3-cf30-38be652a4cbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 933.322412] env[62923]: DEBUG oslo_vmware.api [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370177, 'name': ReconfigVM_Task, 'duration_secs': 0.749675} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 933.323036] env[62923]: DEBUG oslo_concurrency.lockutils [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 933.323448] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Reconfigured VM to attach interface {{(pid=62923) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}}
[ 933.502516] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370181, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 933.532379] env[62923]: DEBUG oslo_concurrency.lockutils [req-818d3305-3116-43f2-a71e-f531636195de req-992ad3a6-d7c1-4337-8882-11f0d3d24edb service nova] Releasing lock "refresh_cache-b145b71c-c56b-4872-bb61-fa3e65fef04f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 933.554405] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370182, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135917} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 933.557573] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 933.557959] env[62923]: DEBUG oslo_vmware.api [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370183, 'name': PowerOffVM_Task, 'duration_secs': 0.231454} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 933.558828] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8910a13f-6cfd-4d28-baf2-72a921ea07f9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.561395] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 933.561579] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 933.562348] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 933.562831] env[62923]: DEBUG nova.compute.manager [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 933.565398] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4aff0579-b00f-43d6-ac20-7cf2b60a6574 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.588329] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 75f9473f-ca67-4bb5-8663-0ce3709885e9/75f9473f-ca67-4bb5-8663-0ce3709885e9.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 933.589080] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2b99467-61ca-4eb3-bfcd-f3ef6440a339 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.610342] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){
[ 933.610342] env[62923]: value = "task-1370185"
[ 933.610342] env[62923]: _type = "Task"
[ 933.610342] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 933.619503] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370185, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 933.648294] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 933.648567] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 933.650192] env[62923]: INFO nova.compute.claims [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 933.661815] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "4de1c06d-3261-4447-b5bc-a21a91f7a812" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 933.662100] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "4de1c06d-3261-4447-b5bc-a21a91f7a812" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 933.793078] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 933.793388] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 933.793718] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleting the datastore file [datastore2] 906470fc-5fec-4c98-8a38-337361e12bc5 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 933.794053] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a9f9389-9495-46df-9144-8178ea6d0031 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.801352] env[62923]: DEBUG oslo_vmware.api [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){
[ 933.801352] env[62923]: value = "task-1370186"
[ 933.801352] env[62923]: _type = "Task"
[ 933.801352] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 933.813713] env[62923]: DEBUG oslo_vmware.api [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370186, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 933.817596] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]526dbb40-48bb-25b3-cf30-38be652a4cbc, 'name': SearchDatastore_Task, 'duration_secs': 0.065532} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 933.817963] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 933.818299] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] b145b71c-c56b-4872-bb61-fa3e65fef04f/b145b71c-c56b-4872-bb61-fa3e65fef04f.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 933.818614] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ad64424-fd1f-4381-b219-3e52109e1eba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 933.825705] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){
[ 933.825705] env[62923]: value = "task-1370187"
[ 933.825705] env[62923]: _type = "Task"
[ 933.825705] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 933.830421] env[62923]: DEBUG oslo_concurrency.lockutils [None req-92d407df-47c9-40a9-b4c0-c2c6c20d3d1a tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "interface-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-1353213d-e2e0-4537-a849-37be48c686ac" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.242s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 933.836471] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370187, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.001114] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370181, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.007761] env[62923]: DEBUG nova.network.neutron [-] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 934.069561] env[62923]: DEBUG nova.compute.utils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 934.071913] env[62923]: DEBUG nova.compute.manager [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 934.072236] env[62923]: DEBUG nova.network.neutron [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 934.117742] env[62923]: DEBUG nova.policy [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '676a737149a9418498a55f83760df073', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d1cf5e642524949a8366bf54d00593e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}}
[ 934.125748] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370185, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.165287] env[62923]: DEBUG nova.compute.manager [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 934.311312] env[62923]: DEBUG oslo_vmware.api [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370186, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.338985] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370187, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.388136] env[62923]: DEBUG nova.compute.manager [req-5f5999c9-b9e2-4b1c-a383-94ea508223bf req-ff0e3cc7-a5f6-47b8-9b4e-28c1c0250aca service nova] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Received event network-vif-deleted-0eb9a757-0625-4e00-a9b0-55888eb57e7b {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 934.501872] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370181, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.512487] env[62923]: INFO nova.compute.manager [-] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Took 1.39 seconds to deallocate network for instance.
[ 934.523128] env[62923]: DEBUG nova.network.neutron [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Successfully created port: f1ad0989-e12d-4073-92b4-3a53bf5b8eb2 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 934.583895] env[62923]: DEBUG nova.compute.manager [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 934.621968] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370185, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.686721] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 934.815770] env[62923]: DEBUG oslo_vmware.api [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370186, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.837905] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370187, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.926206] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f0e8bf-ed0d-4e2c-b7fe-2eeea74f7d25 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 934.934306] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3187de-a7ab-4f3b-a99a-15def12506d6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 934.965784] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a193ed0-2e36-49ca-927a-22828ae7389b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 934.973925] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cef5e0-1667-4f0c-bda4-b7ec0e227f5e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 934.990361] env[62923]: DEBUG nova.compute.provider_tree [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 935.000577] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370181, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 935.020179] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 935.122728] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370185, 'name': ReconfigVM_Task, 'duration_secs': 1.023185} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 935.123083] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 75f9473f-ca67-4bb5-8663-0ce3709885e9/75f9473f-ca67-4bb5-8663-0ce3709885e9.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 935.123889] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7cff7d2e-fe4c-425c-80e8-d5991f9871ea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 935.136056] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){
[ 935.136056] env[62923]: value = "task-1370188"
[ 935.136056] env[62923]: _type = "Task"
[ 935.136056] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 935.146624] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370188, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 935.312396] env[62923]: DEBUG oslo_vmware.api [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370186, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 935.336865] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370187, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 935.494153] env[62923]: DEBUG nova.scheduler.client.report [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 935.508016] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370181, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 935.557266] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "interface-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-1353213d-e2e0-4537-a849-37be48c686ac" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 935.557587] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "interface-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-1353213d-e2e0-4537-a849-37be48c686ac" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 935.599718] env[62923]: DEBUG nova.compute.manager [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 935.632673] env[62923]: DEBUG nova.virt.hardware [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 935.632673] env[62923]: DEBUG nova.virt.hardware [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 935.632890] env[62923]: DEBUG nova.virt.hardware [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 935.633030] env[62923]: DEBUG nova.virt.hardware [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 935.633234] env[62923]: DEBUG nova.virt.hardware [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 935.633470] env[62923]: DEBUG nova.virt.hardware [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 935.634314] env[62923]: DEBUG nova.virt.hardware [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 935.634314] env[62923]: DEBUG nova.virt.hardware [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 935.634314] env[62923]: DEBUG nova.virt.hardware [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 935.634314] env[62923]: DEBUG nova.virt.hardware [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 935.634537] env[62923]: DEBUG nova.virt.hardware [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 935.635882] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb66e4a-11d5-42c5-b982-eb75effcf605 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 935.651434] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c428ce4c-a15f-4929-a0d0-e71c113a3bdc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 935.656254] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370188, 'name': Rename_Task, 'duration_secs': 0.317456} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 935.656664] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 935.657416] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11f93f8b-1aea-4d48-8c74-b27c8dfeb8e1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 935.673102] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){
[ 935.673102] env[62923]: value = "task-1370189"
[ 935.673102] env[62923]: _type = "Task"
[ 935.673102] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 935.681968] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370189, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 935.812814] env[62923]: DEBUG oslo_vmware.api [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370186, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.62757} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 935.813132] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 935.813334] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 935.813520] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 935.813697] env[62923]: INFO nova.compute.manager [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Took 2.80 seconds to destroy the instance on the hypervisor.
[ 935.813933] env[62923]: DEBUG oslo.service.loopingcall [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 935.814142] env[62923]: DEBUG nova.compute.manager [-] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 935.814236] env[62923]: DEBUG nova.network.neutron [-] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 935.839161] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370187, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.929258} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.839391] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] b145b71c-c56b-4872-bb61-fa3e65fef04f/b145b71c-c56b-4872-bb61-fa3e65fef04f.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 935.839606] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 935.839855] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d52069a7-006c-4b76-b927-a73a9e42b5f7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.846347] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 935.846347] env[62923]: value = "task-1370190" [ 935.846347] env[62923]: _type = "Task" [ 935.846347] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.856168] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370190, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.006025] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.357s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.006290] env[62923]: DEBUG nova.compute.manager [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 936.009618] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.323s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.010829] env[62923]: INFO nova.compute.claims [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 936.018771] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370181, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.651577} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.019072] env[62923]: INFO nova.virt.vmwareapi.ds_util [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_d647e1b9-7d8d-43c0-af05-89fbfff34ad4/OSTACK_IMG_d647e1b9-7d8d-43c0-af05-89fbfff34ad4.vmdk to [datastore2] devstack-image-cache_base/82a06d7c-e957-4bd3-97f2-0322af9583a1/82a06d7c-e957-4bd3-97f2-0322af9583a1.vmdk. 
[ 936.019265] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Cleaning up location [datastore2] OSTACK_IMG_d647e1b9-7d8d-43c0-af05-89fbfff34ad4 {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 936.019440] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_d647e1b9-7d8d-43c0-af05-89fbfff34ad4 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.019701] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6573af39-c036-4d11-b769-0a07ef240857 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.026686] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 936.026686] env[62923]: value = "task-1370191" [ 936.026686] env[62923]: _type = "Task" [ 936.026686] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.035618] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370191, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.060822] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.061015] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.061916] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d18e47e-cb27-4867-82df-e6606a7b318c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.081321] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4015bbec-b5b6-4782-be3d-683e68f89cf6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.110951] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Reconfiguring VM to detach interface {{(pid=62923) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 936.111329] env[62923]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-413761ca-84f9-458d-8fe3-4b0a18d31c1c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.131847] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 936.131847] env[62923]: value = "task-1370192" [ 936.131847] env[62923]: _type = "Task" [ 936.131847] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.141168] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370192, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.184596] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370189, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.359697] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370190, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075223} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.359966] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 936.360729] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf241a0-ce58-494a-aa7c-59e28e81da37 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.385063] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] b145b71c-c56b-4872-bb61-fa3e65fef04f/b145b71c-c56b-4872-bb61-fa3e65fef04f.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 936.385377] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-713f592c-b563-493e-afc6-fd88c9ef0276 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.400648] env[62923]: DEBUG nova.network.neutron [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Successfully updated port: f1ad0989-e12d-4073-92b4-3a53bf5b8eb2 
{{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 936.406576] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 936.406576] env[62923]: value = "task-1370193" [ 936.406576] env[62923]: _type = "Task" [ 936.406576] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.415418] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370193, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.423620] env[62923]: DEBUG nova.compute.manager [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Received event network-vif-deleted-5d5b7910-f073-428b-abd2-b725d57387b6 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 936.423882] env[62923]: INFO nova.compute.manager [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Neutron deleted interface 5d5b7910-f073-428b-abd2-b725d57387b6; detaching it from the instance and deleting it from the info cache [ 936.424064] env[62923]: DEBUG nova.network.neutron [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.515797] env[62923]: DEBUG nova.compute.utils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 936.519038] env[62923]: DEBUG nova.compute.manager [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 936.519199] env[62923]: DEBUG nova.network.neutron [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 936.536055] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370191, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108824} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.536222] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.536274] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "[datastore2] devstack-image-cache_base/82a06d7c-e957-4bd3-97f2-0322af9583a1/82a06d7c-e957-4bd3-97f2-0322af9583a1.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.536515] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/82a06d7c-e957-4bd3-97f2-0322af9583a1/82a06d7c-e957-4bd3-97f2-0322af9583a1.vmdk to [datastore2] 41cc788d-9be8-4959-9cef-d91304f5879d/41cc788d-9be8-4959-9cef-d91304f5879d.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 936.536763] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-685e3e5a-13a2-48a5-b3a9-204fc73575bd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.543192] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 936.543192] env[62923]: value = "task-1370194" [ 936.543192] env[62923]: _type = "Task" [ 936.543192] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.551595] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370194, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.577441] env[62923]: DEBUG nova.policy [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68e62d519b19448c8cac7f1b2e55a087', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3b09245b63144e9bbcb2262aef33a21', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 936.643880] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370192, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.649417] env[62923]: DEBUG nova.network.neutron [-] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.695422] env[62923]: DEBUG oslo_vmware.api [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370189, 'name': PowerOnVM_Task, 'duration_secs': 0.529989} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.695422] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 936.695422] env[62923]: INFO nova.compute.manager [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Took 10.18 seconds to spawn the instance on the hypervisor. [ 936.695422] env[62923]: DEBUG nova.compute.manager [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 936.695422] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf4daea-af2c-4780-833b-a0b4cf9bd81e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.900275] env[62923]: DEBUG nova.network.neutron [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Successfully created port: 422a6526-df54-4c7f-a43c-01c8902e1fb8 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 936.902483] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "refresh_cache-eaa654f9-023d-4514-930d-6bebd421325a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.902614] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "refresh_cache-eaa654f9-023d-4514-930d-6bebd421325a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.902758] env[62923]: DEBUG nova.network.neutron [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Building network info cache for instance {{(pid=62923) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 936.921847] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370193, 'name': ReconfigVM_Task, 'duration_secs': 0.415064} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.922394] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Reconfigured VM instance instance-00000058 to attach disk [datastore2] b145b71c-c56b-4872-bb61-fa3e65fef04f/b145b71c-c56b-4872-bb61-fa3e65fef04f.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.923123] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3117c96-97a6-4fab-a5d8-e8ac0d3563ea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.929297] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6da33e69-9cf9-498c-ad3d-40675be970e8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.933869] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 936.933869] env[62923]: value = "task-1370195" [ 936.933869] env[62923]: _type = "Task" [ 936.933869] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.946625] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c2ecc1-ecf4-49fe-a125-61c2ecb2e6f2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.963152] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370195, 'name': Rename_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.985880] env[62923]: DEBUG nova.compute.manager [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Detach interface failed, port_id=5d5b7910-f073-428b-abd2-b725d57387b6, reason: Instance 906470fc-5fec-4c98-8a38-337361e12bc5 could not be found. 
{{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 936.986292] env[62923]: DEBUG nova.compute.manager [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Received event network-vif-plugged-f1ad0989-e12d-4073-92b4-3a53bf5b8eb2 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 936.986575] env[62923]: DEBUG oslo_concurrency.lockutils [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] Acquiring lock "eaa654f9-023d-4514-930d-6bebd421325a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.987184] env[62923]: DEBUG oslo_concurrency.lockutils [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] Lock "eaa654f9-023d-4514-930d-6bebd421325a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.987184] env[62923]: DEBUG oslo_concurrency.lockutils [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] Lock "eaa654f9-023d-4514-930d-6bebd421325a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.987184] env[62923]: DEBUG nova.compute.manager [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] [instance: eaa654f9-023d-4514-930d-6bebd421325a] No waiting events found dispatching network-vif-plugged-f1ad0989-e12d-4073-92b4-3a53bf5b8eb2 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 936.987499] env[62923]: WARNING nova.compute.manager [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Received unexpected event network-vif-plugged-f1ad0989-e12d-4073-92b4-3a53bf5b8eb2 for instance with vm_state building and task_state spawning. [ 936.987760] env[62923]: DEBUG nova.compute.manager [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Received event network-changed-f1ad0989-e12d-4073-92b4-3a53bf5b8eb2 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 936.988292] env[62923]: DEBUG nova.compute.manager [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Refreshing instance network info cache due to event network-changed-f1ad0989-e12d-4073-92b4-3a53bf5b8eb2. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 936.988560] env[62923]: DEBUG oslo_concurrency.lockutils [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] Acquiring lock "refresh_cache-eaa654f9-023d-4514-930d-6bebd421325a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.019823] env[62923]: DEBUG nova.compute.manager [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 937.057322] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370194, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.146015] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370192, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.151667] env[62923]: INFO nova.compute.manager [-] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Took 1.34 seconds to deallocate network for instance. [ 937.215551] env[62923]: INFO nova.compute.manager [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Took 18.21 seconds to build instance. 
[ 937.312645] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668b7323-e214-4708-950c-b5a45786134d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.321774] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc1de92-9c60-4ddd-9730-513c4e1fdd8f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.357140] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b298d99f-8f6e-4cf8-938d-b9dda422c072 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.365591] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a17f771-4b67-4f6c-b3e1-2aae26eb549d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.379832] env[62923]: DEBUG nova.compute.provider_tree [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.442544] env[62923]: DEBUG nova.network.neutron [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 937.450666] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370195, 'name': Rename_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.555928] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370194, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.612050] env[62923]: DEBUG nova.network.neutron [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Updating instance_info_cache with network_info: [{"id": "f1ad0989-e12d-4073-92b4-3a53bf5b8eb2", "address": "fa:16:3e:07:71:9c", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1ad0989-e1", "ovs_interfaceid": "f1ad0989-e12d-4073-92b4-3a53bf5b8eb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.644585] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370192, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.659459] env[62923]: DEBUG oslo_concurrency.lockutils [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.718619] env[62923]: DEBUG oslo_concurrency.lockutils [None req-3af470e9-3c44-4f7a-9244-7269ddbb60e8 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 19.723s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.883736] env[62923]: DEBUG nova.scheduler.client.report [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 937.946562] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370195, 'name': Rename_Task, 'duration_secs': 0.988722} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.946881] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 937.947212] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12329740-a3b4-4eaa-a85e-c5e0ba8d5f86 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.953697] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 937.953697] env[62923]: value = "task-1370196" [ 937.953697] env[62923]: _type = "Task" [ 937.953697] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.962413] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370196, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.034131] env[62923]: DEBUG nova.compute.manager [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 938.055357] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370194, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.067158] env[62923]: DEBUG nova.virt.hardware [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 938.067506] env[62923]: DEBUG nova.virt.hardware [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 938.067587] env[62923]: DEBUG nova.virt.hardware [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.067779] env[62923]: DEBUG nova.virt.hardware [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 938.068018] env[62923]: DEBUG nova.virt.hardware [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.068231] env[62923]: DEBUG nova.virt.hardware [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 938.068449] env[62923]: DEBUG 
nova.virt.hardware [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 938.068612] env[62923]: DEBUG nova.virt.hardware [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 938.068783] env[62923]: DEBUG nova.virt.hardware [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 938.068951] env[62923]: DEBUG nova.virt.hardware [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 938.069257] env[62923]: DEBUG nova.virt.hardware [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 938.070195] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ae6e3e-d3df-43d6-a7fa-f1faf160bb3a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.078982] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e70ae3d-26dd-4e38-a626-785c48fb8025 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.115413] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "refresh_cache-eaa654f9-023d-4514-930d-6bebd421325a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.116016] env[62923]: DEBUG nova.compute.manager [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Instance network_info: |[{"id": "f1ad0989-e12d-4073-92b4-3a53bf5b8eb2", "address": "fa:16:3e:07:71:9c", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1ad0989-e1", "ovs_interfaceid": "f1ad0989-e12d-4073-92b4-3a53bf5b8eb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 938.116357] env[62923]: DEBUG oslo_concurrency.lockutils [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] Acquired lock "refresh_cache-eaa654f9-023d-4514-930d-6bebd421325a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.116562] env[62923]: DEBUG nova.network.neutron [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Refreshing network info cache for port f1ad0989-e12d-4073-92b4-3a53bf5b8eb2 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 938.117950] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:71:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e41070eb-3ac1-4ca9-a3d0-fd65893a97de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1ad0989-e12d-4073-92b4-3a53bf5b8eb2', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 938.126906] env[62923]: DEBUG oslo.service.loopingcall [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 938.128050] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 938.128273] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41ee9672-e45e-4508-b135-d3d6f9e25639 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.156838] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370192, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.158261] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 938.158261] env[62923]: value = "task-1370197" [ 938.158261] env[62923]: _type = "Task" [ 938.158261] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.167691] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370197, 'name': CreateVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.389827] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.381s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.390387] env[62923]: DEBUG nova.compute.manager [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 938.393391] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.373s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.393602] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.395923] env[62923]: DEBUG oslo_concurrency.lockutils [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.737s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.396259] env[62923]: DEBUG nova.objects.instance [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lazy-loading 'resources' on Instance uuid 906470fc-5fec-4c98-8a38-337361e12bc5 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.423525] env[62923]: INFO nova.scheduler.client.report [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Deleted allocations for instance 066da19f-daf0-44e3-8ae0-89f0c970cb92 [ 938.465557] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370196, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.562259] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370194, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.659979] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370192, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.669553] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370197, 'name': CreateVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.683442] env[62923]: DEBUG nova.compute.manager [req-bb89eac5-bd4c-412a-9d69-b40b7bd1d91a req-03903f10-e290-472a-a646-e847eaacec9e service nova] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Received event network-vif-plugged-422a6526-df54-4c7f-a43c-01c8902e1fb8 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 938.683724] env[62923]: DEBUG oslo_concurrency.lockutils [req-bb89eac5-bd4c-412a-9d69-b40b7bd1d91a req-03903f10-e290-472a-a646-e847eaacec9e service nova] Acquiring lock "2a9a93f8-9398-4a19-a149-a1092ceb416d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.683999] env[62923]: DEBUG oslo_concurrency.lockutils [req-bb89eac5-bd4c-412a-9d69-b40b7bd1d91a req-03903f10-e290-472a-a646-e847eaacec9e service nova] Lock "2a9a93f8-9398-4a19-a149-a1092ceb416d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.684323] env[62923]: DEBUG oslo_concurrency.lockutils [req-bb89eac5-bd4c-412a-9d69-b40b7bd1d91a req-03903f10-e290-472a-a646-e847eaacec9e service nova] Lock "2a9a93f8-9398-4a19-a149-a1092ceb416d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.685176] env[62923]: DEBUG nova.compute.manager [req-bb89eac5-bd4c-412a-9d69-b40b7bd1d91a req-03903f10-e290-472a-a646-e847eaacec9e service nova] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] No waiting events found dispatching network-vif-plugged-422a6526-df54-4c7f-a43c-01c8902e1fb8 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 938.685176] env[62923]: WARNING nova.compute.manager [req-bb89eac5-bd4c-412a-9d69-b40b7bd1d91a req-03903f10-e290-472a-a646-e847eaacec9e service nova] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Received unexpected event network-vif-plugged-422a6526-df54-4c7f-a43c-01c8902e1fb8 for instance with vm_state building and task_state spawning. 
[ 938.708376] env[62923]: DEBUG nova.compute.manager [req-f57c7155-0112-42db-a0aa-8e461523f8df req-a6a9619f-0029-40ef-abfc-ebf98d182adb service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Received event network-changed-545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 938.708674] env[62923]: DEBUG nova.compute.manager [req-f57c7155-0112-42db-a0aa-8e461523f8df req-a6a9619f-0029-40ef-abfc-ebf98d182adb service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Refreshing instance network info cache due to event network-changed-545dfb40-7ae4-4d69-86f8-0d334ced67ff. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 938.708952] env[62923]: DEBUG oslo_concurrency.lockutils [req-f57c7155-0112-42db-a0aa-8e461523f8df req-a6a9619f-0029-40ef-abfc-ebf98d182adb service nova] Acquiring lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.709181] env[62923]: DEBUG oslo_concurrency.lockutils [req-f57c7155-0112-42db-a0aa-8e461523f8df req-a6a9619f-0029-40ef-abfc-ebf98d182adb service nova] Acquired lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.709470] env[62923]: DEBUG nova.network.neutron [req-f57c7155-0112-42db-a0aa-8e461523f8df req-a6a9619f-0029-40ef-abfc-ebf98d182adb service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Refreshing network info cache for port 545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 938.900084] env[62923]: DEBUG nova.compute.utils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 938.906826] env[62923]: DEBUG nova.compute.manager [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 938.910153] env[62923]: DEBUG nova.network.neutron [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 938.915978] env[62923]: DEBUG nova.network.neutron [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Successfully updated port: 422a6526-df54-4c7f-a43c-01c8902e1fb8 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 938.935158] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dd23bcb5-7db9-4287-88d0-5e4bf8dc2940 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "066da19f-daf0-44e3-8ae0-89f0c970cb92" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.963520] env[62923]: DEBUG nova.policy [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c654b8365f5543f3bf713f3f5aa00654', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a654d46357ed49cd95460a56926f102a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 938.969469] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370196, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.982701] env[62923]: DEBUG nova.network.neutron [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Updated VIF entry in instance network info cache for port f1ad0989-e12d-4073-92b4-3a53bf5b8eb2. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 938.983314] env[62923]: DEBUG nova.network.neutron [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Updating instance_info_cache with network_info: [{"id": "f1ad0989-e12d-4073-92b4-3a53bf5b8eb2", "address": "fa:16:3e:07:71:9c", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1ad0989-e1", "ovs_interfaceid": "f1ad0989-e12d-4073-92b4-3a53bf5b8eb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.057995] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370194, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.465155} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.058407] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/82a06d7c-e957-4bd3-97f2-0322af9583a1/82a06d7c-e957-4bd3-97f2-0322af9583a1.vmdk to [datastore2] 41cc788d-9be8-4959-9cef-d91304f5879d/41cc788d-9be8-4959-9cef-d91304f5879d.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 939.059416] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509684a3-9b8b-4940-9336-df2e0746f85c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.087171] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 41cc788d-9be8-4959-9cef-d91304f5879d/41cc788d-9be8-4959-9cef-d91304f5879d.vmdk or device None with type streamOptimized {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 939.087580] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c57c092-cc15-4232-8e0c-21e128731529 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.114023] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 939.114023] env[62923]: value = "task-1370198" [ 939.114023] env[62923]: _type = "Task" [ 939.114023] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.120810] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370198, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.157058] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370192, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.175060] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370197, 'name': CreateVM_Task, 'duration_secs': 0.58556} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.175305] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 939.176282] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.176578] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.177041] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 939.177541] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e219c82-5a4c-4ef4-8b96-77e408053968 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.184940] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 939.184940] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527aaa1d-9832-7306-a133-ccf59f26c7a7" [ 939.184940] env[62923]: _type = "Task" [ 939.184940] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.198199] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527aaa1d-9832-7306-a133-ccf59f26c7a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.231201] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5086e1-5a35-4337-9cbf-25fbbab6f3d8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.238446] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1626e2b5-b5cd-45d3-bc8c-283449a8657c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.273130] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07dd224f-d42a-4bfa-af77-146ab51284aa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.280925] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f0db51-98c1-4983-97a1-20349eb27f6d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.295108] env[62923]: DEBUG nova.compute.provider_tree [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.322347] env[62923]: DEBUG nova.network.neutron [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Successfully created port: f0a69691-e627-471b-bf2c-1705c8a4f373 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 939.407247] env[62923]: DEBUG nova.compute.manager [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 939.420818] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.421058] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.421058] env[62923]: DEBUG nova.network.neutron [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 939.466406] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370196, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.488785] env[62923]: DEBUG oslo_concurrency.lockutils [req-bdb12573-05f2-4c08-9570-47fbb4fc472e req-4e359a0b-f4ce-459d-86ac-7e7b00cb0ca1 service nova] Releasing lock "refresh_cache-eaa654f9-023d-4514-930d-6bebd421325a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.554490] env[62923]: DEBUG nova.network.neutron [req-f57c7155-0112-42db-a0aa-8e461523f8df req-a6a9619f-0029-40ef-abfc-ebf98d182adb service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updated VIF entry in instance network info cache for port 545dfb40-7ae4-4d69-86f8-0d334ced67ff. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 939.554862] env[62923]: DEBUG nova.network.neutron [req-f57c7155-0112-42db-a0aa-8e461523f8df req-a6a9619f-0029-40ef-abfc-ebf98d182adb service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updating instance_info_cache with network_info: [{"id": "545dfb40-7ae4-4d69-86f8-0d334ced67ff", "address": "fa:16:3e:18:f5:c7", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap545dfb40-7a", "ovs_interfaceid": "545dfb40-7ae4-4d69-86f8-0d334ced67ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.624543] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370198, 'name': ReconfigVM_Task, 'duration_secs': 0.286473} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.624969] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 41cc788d-9be8-4959-9cef-d91304f5879d/41cc788d-9be8-4959-9cef-d91304f5879d.vmdk or device None with type streamOptimized {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 939.625821] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9616ac1-fcec-4c7c-ace9-80085e524595 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.633358] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 939.633358] env[62923]: value = "task-1370199" [ 939.633358] env[62923]: _type = "Task" [ 939.633358] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.644542] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370199, 'name': Rename_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.655268] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370192, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.698599] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527aaa1d-9832-7306-a133-ccf59f26c7a7, 'name': SearchDatastore_Task, 'duration_secs': 0.023484} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.698973] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.699287] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 939.699538] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.699721] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.700357] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 939.700357] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02937525-b83c-40b1-8a03-84f76ea8c56d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.709993] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 939.710205] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 939.710931] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6166d8c-2d69-41a4-b8d4-b96f34d6319e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.715917] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 939.715917] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527b8ccb-4383-1443-863f-a075038f8b18" [ 939.715917] env[62923]: _type = "Task" [ 939.715917] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.723546] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527b8ccb-4383-1443-863f-a075038f8b18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.798627] env[62923]: DEBUG nova.scheduler.client.report [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 939.953355] env[62923]: DEBUG nova.network.neutron [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 939.966855] env[62923]: DEBUG oslo_vmware.api [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370196, 'name': PowerOnVM_Task, 'duration_secs': 1.583724} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.967156] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 939.967361] env[62923]: INFO nova.compute.manager [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Took 11.05 seconds to spawn the instance on the hypervisor. [ 939.967540] env[62923]: DEBUG nova.compute.manager [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 939.968650] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40cd3ad-c716-4930-ae03-c4df31278a5f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.057704] env[62923]: DEBUG oslo_concurrency.lockutils [req-f57c7155-0112-42db-a0aa-8e461523f8df req-a6a9619f-0029-40ef-abfc-ebf98d182adb service nova] Releasing lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.090840] env[62923]: DEBUG nova.network.neutron [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating instance_info_cache with network_info: [{"id": "422a6526-df54-4c7f-a43c-01c8902e1fb8", "address": "fa:16:3e:3e:65:69", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap422a6526-df", "ovs_interfaceid": "422a6526-df54-4c7f-a43c-01c8902e1fb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.143544] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370199, 'name': Rename_Task, 'duration_secs': 0.145021} 
completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.143807] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 940.144070] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2826ee52-1586-4cd6-921e-6ba62b5628a8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.154063] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370192, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.155243] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 940.155243] env[62923]: value = "task-1370200" [ 940.155243] env[62923]: _type = "Task" [ 940.155243] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.162290] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370200, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.226020] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527b8ccb-4383-1443-863f-a075038f8b18, 'name': SearchDatastore_Task, 'duration_secs': 0.023732} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.226810] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-585a9c62-2630-4500-9595-6600a002a622 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.232165] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 940.232165] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a07238-b720-9b37-c5e3-ad14122b0f1f" [ 940.232165] env[62923]: _type = "Task" [ 940.232165] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.241076] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a07238-b720-9b37-c5e3-ad14122b0f1f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.303239] env[62923]: DEBUG oslo_concurrency.lockutils [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.907s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.327121] env[62923]: INFO nova.scheduler.client.report [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleted allocations for instance 906470fc-5fec-4c98-8a38-337361e12bc5 [ 940.420799] env[62923]: DEBUG nova.compute.manager [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 940.446430] env[62923]: DEBUG nova.virt.hardware [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 940.446683] env[62923]: DEBUG nova.virt.hardware [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 940.446844] env[62923]: DEBUG nova.virt.hardware [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 940.447040] env[62923]: DEBUG nova.virt.hardware [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 940.447194] env[62923]: DEBUG nova.virt.hardware [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 940.447345] env[62923]: DEBUG nova.virt.hardware [None req-43528109-9f8c-490b-81a6-768ef31ad84a 
tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 940.447550] env[62923]: DEBUG nova.virt.hardware [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 940.447735] env[62923]: DEBUG nova.virt.hardware [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 940.447931] env[62923]: DEBUG nova.virt.hardware [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 940.448600] env[62923]: DEBUG nova.virt.hardware [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 940.448831] env[62923]: DEBUG nova.virt.hardware [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 940.449752] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4aa0655-45e5-4014-8d64-72f29b372c32 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.457943] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e995f696-bc7c-4d79-96f6-78bfa868bafb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.491018] env[62923]: INFO nova.compute.manager [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Took 20.96 seconds to build instance. 
[ 940.593628] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.593999] env[62923]: DEBUG nova.compute.manager [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Instance network_info: |[{"id": "422a6526-df54-4c7f-a43c-01c8902e1fb8", "address": "fa:16:3e:3e:65:69", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap422a6526-df", "ovs_interfaceid": "422a6526-df54-4c7f-a43c-01c8902e1fb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 940.594450] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:65:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91712705-510f-41a0-a803-2ecd92b676e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '422a6526-df54-4c7f-a43c-01c8902e1fb8', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 940.602278] env[62923]: DEBUG oslo.service.loopingcall [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 940.602779] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 940.603011] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7bddf84-9fbb-4676-aaca-1fadff491f80 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.622364] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 940.622364] env[62923]: value = "task-1370201" [ 940.622364] env[62923]: _type = "Task" [ 940.622364] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.629543] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370201, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.655512] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370192, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.667532] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370200, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.744269] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a07238-b720-9b37-c5e3-ad14122b0f1f, 'name': SearchDatastore_Task, 'duration_secs': 0.082976} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.744616] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.744970] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] eaa654f9-023d-4514-930d-6bebd421325a/eaa654f9-023d-4514-930d-6bebd421325a.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 940.745262] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c035a45-624e-4648-8b6a-5afc3f7c0804 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.753963] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 940.753963] env[62923]: value = "task-1370202" [ 940.753963] env[62923]: _type = "Task" [ 940.753963] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.760487] env[62923]: DEBUG nova.compute.manager [req-05972a5b-b6f3-42a3-a024-c7b48cf93f06 req-bdd00536-9cb3-4474-88b3-00701db7a16e service nova] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Received event network-changed-422a6526-df54-4c7f-a43c-01c8902e1fb8 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.760758] env[62923]: DEBUG nova.compute.manager [req-05972a5b-b6f3-42a3-a024-c7b48cf93f06 req-bdd00536-9cb3-4474-88b3-00701db7a16e service nova] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Refreshing instance network info cache due to event network-changed-422a6526-df54-4c7f-a43c-01c8902e1fb8. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 940.761050] env[62923]: DEBUG oslo_concurrency.lockutils [req-05972a5b-b6f3-42a3-a024-c7b48cf93f06 req-bdd00536-9cb3-4474-88b3-00701db7a16e service nova] Acquiring lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.761266] env[62923]: DEBUG oslo_concurrency.lockutils [req-05972a5b-b6f3-42a3-a024-c7b48cf93f06 req-bdd00536-9cb3-4474-88b3-00701db7a16e service nova] Acquired lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.761479] env[62923]: DEBUG nova.network.neutron [req-05972a5b-b6f3-42a3-a024-c7b48cf93f06 req-bdd00536-9cb3-4474-88b3-00701db7a16e service nova] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Refreshing network info cache for port 422a6526-df54-4c7f-a43c-01c8902e1fb8 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 940.768742] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370202, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.781190] env[62923]: DEBUG nova.compute.manager [req-9a6d0864-ab3e-4631-bb10-ca4a6b75cf5f req-bab621c6-9332-4d82-bc13-da187b0c3690 service nova] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Received event network-vif-plugged-f0a69691-e627-471b-bf2c-1705c8a4f373 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.781423] env[62923]: DEBUG oslo_concurrency.lockutils [req-9a6d0864-ab3e-4631-bb10-ca4a6b75cf5f req-bab621c6-9332-4d82-bc13-da187b0c3690 service nova] Acquiring lock "4de1c06d-3261-4447-b5bc-a21a91f7a812-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.781864] env[62923]: DEBUG oslo_concurrency.lockutils [req-9a6d0864-ab3e-4631-bb10-ca4a6b75cf5f req-bab621c6-9332-4d82-bc13-da187b0c3690 service nova] Lock "4de1c06d-3261-4447-b5bc-a21a91f7a812-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.781864] env[62923]: DEBUG oslo_concurrency.lockutils [req-9a6d0864-ab3e-4631-bb10-ca4a6b75cf5f req-bab621c6-9332-4d82-bc13-da187b0c3690 service nova] Lock "4de1c06d-3261-4447-b5bc-a21a91f7a812-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.781954] env[62923]: DEBUG nova.compute.manager [req-9a6d0864-ab3e-4631-bb10-ca4a6b75cf5f req-bab621c6-9332-4d82-bc13-da187b0c3690 service nova] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] No waiting events found dispatching network-vif-plugged-f0a69691-e627-471b-bf2c-1705c8a4f373 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 940.782931] env[62923]: WARNING nova.compute.manager [req-9a6d0864-ab3e-4631-bb10-ca4a6b75cf5f req-bab621c6-9332-4d82-bc13-da187b0c3690 
service nova] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Received unexpected event network-vif-plugged-f0a69691-e627-471b-bf2c-1705c8a4f373 for instance with vm_state building and task_state spawning. [ 940.837044] env[62923]: DEBUG oslo_concurrency.lockutils [None req-11d4c63e-2bb0-4fa6-afb0-a8675f12431e tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "906470fc-5fec-4c98-8a38-337361e12bc5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.830s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.993948] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7fccac90-9017-4335-bb3d-cba77ae0cf3d tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "b145b71c-c56b-4872-bb61-fa3e65fef04f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.468s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.020707] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "0a9fdd83-3818-4831-90f9-9d30713961c5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.021102] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "0a9fdd83-3818-4831-90f9-9d30713961c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.021407] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "0a9fdd83-3818-4831-90f9-9d30713961c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.021672] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "0a9fdd83-3818-4831-90f9-9d30713961c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.021866] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "0a9fdd83-3818-4831-90f9-9d30713961c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.026964] env[62923]: INFO nova.compute.manager [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 
0a9fdd83-3818-4831-90f9-9d30713961c5] Terminating instance [ 941.031082] env[62923]: DEBUG nova.compute.manager [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 941.031297] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.032237] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4047e3-bd56-4407-9eac-6a07c8737a0b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.040809] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.041130] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7b7b238-56e0-4284-97e7-9adbcd1bc9a4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.053558] env[62923]: DEBUG oslo_vmware.api [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 941.053558] env[62923]: value = "task-1370203" [ 941.053558] env[62923]: _type = "Task" [ 941.053558] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.063694] env[62923]: DEBUG oslo_vmware.api [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370203, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.134888] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370201, 'name': CreateVM_Task, 'duration_secs': 0.345048} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.135317] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 941.135826] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.135976] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.136345] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 941.136647] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00afe04d-1373-405d-bf08-7823c0ac4438 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.143475] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 941.143475] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cfdb88-c470-4573-422f-628e0f353a13" [ 941.143475] env[62923]: _type = "Task" [ 941.143475] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.158847] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cfdb88-c470-4573-422f-628e0f353a13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.166484] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370192, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.174856] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370200, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.263698] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370202, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509293} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.266244] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] eaa654f9-023d-4514-930d-6bebd421325a/eaa654f9-023d-4514-930d-6bebd421325a.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 941.266503] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 941.266776] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfd1be38-831c-4db8-9854-c84785d94de9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.273082] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 941.273082] env[62923]: value = "task-1370204" [ 941.273082] env[62923]: _type = "Task" [ 941.273082] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.280806] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370204, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.326393] env[62923]: DEBUG nova.network.neutron [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Successfully updated port: f0a69691-e627-471b-bf2c-1705c8a4f373 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 941.363853] env[62923]: DEBUG nova.compute.manager [req-9383ef82-a324-4893-81f8-521fe92f507b req-fec8b996-bb2d-4a23-b1fd-23ff4f3eb807 service nova] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Received event network-changed-f0a69691-e627-471b-bf2c-1705c8a4f373 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 941.364062] env[62923]: DEBUG nova.compute.manager [req-9383ef82-a324-4893-81f8-521fe92f507b req-fec8b996-bb2d-4a23-b1fd-23ff4f3eb807 service nova] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Refreshing instance network info cache due to event network-changed-f0a69691-e627-471b-bf2c-1705c8a4f373. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 941.364938] env[62923]: DEBUG oslo_concurrency.lockutils [req-9383ef82-a324-4893-81f8-521fe92f507b req-fec8b996-bb2d-4a23-b1fd-23ff4f3eb807 service nova] Acquiring lock "refresh_cache-4de1c06d-3261-4447-b5bc-a21a91f7a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.365127] env[62923]: DEBUG oslo_concurrency.lockutils [req-9383ef82-a324-4893-81f8-521fe92f507b req-fec8b996-bb2d-4a23-b1fd-23ff4f3eb807 service nova] Acquired lock "refresh_cache-4de1c06d-3261-4447-b5bc-a21a91f7a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.365474] env[62923]: DEBUG nova.network.neutron [req-9383ef82-a324-4893-81f8-521fe92f507b req-fec8b996-bb2d-4a23-b1fd-23ff4f3eb807 service nova] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Refreshing network info cache for port f0a69691-e627-471b-bf2c-1705c8a4f373 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 941.522633] env[62923]: DEBUG nova.network.neutron [req-05972a5b-b6f3-42a3-a024-c7b48cf93f06 req-bdd00536-9cb3-4474-88b3-00701db7a16e service nova] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updated VIF entry in instance network info cache for port 422a6526-df54-4c7f-a43c-01c8902e1fb8. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 941.522633] env[62923]: DEBUG nova.network.neutron [req-05972a5b-b6f3-42a3-a024-c7b48cf93f06 req-bdd00536-9cb3-4474-88b3-00701db7a16e service nova] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating instance_info_cache with network_info: [{"id": "422a6526-df54-4c7f-a43c-01c8902e1fb8", "address": "fa:16:3e:3e:65:69", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap422a6526-df", "ovs_interfaceid": "422a6526-df54-4c7f-a43c-01c8902e1fb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.564888] env[62923]: DEBUG oslo_vmware.api [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370203, 'name': PowerOffVM_Task, 'duration_secs': 0.298241} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.567486] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.567486] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.567486] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd306182-7d89-41e6-a8ef-9eddf02d869f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.626024] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 941.626024] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 941.626024] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleting the datastore file [datastore1] 0a9fdd83-3818-4831-90f9-9d30713961c5 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.626024] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c6a9815-3dcc-4b3f-8e9e-cca5a614d1f8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.631827] env[62923]: DEBUG oslo_vmware.api [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 941.631827] env[62923]: value = "task-1370206" [ 941.631827] env[62923]: _type = "Task" [ 941.631827] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.639091] env[62923]: DEBUG oslo_vmware.api [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370206, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.654239] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cfdb88-c470-4573-422f-628e0f353a13, 'name': SearchDatastore_Task, 'duration_secs': 0.054916} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.654813] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.655070] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 941.655309] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.655458] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.655692] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 941.655964] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-becf80aa-aa3b-492e-a413-7efeccea5f74 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.660424] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370192, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.665742] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 941.665913] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 941.669143] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bad026f3-2ada-4159-b72b-fccbeaf44e4d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.671112] env[62923]: DEBUG oslo_vmware.api [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370200, 'name': PowerOnVM_Task, 'duration_secs': 1.115819} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.671358] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 941.671554] env[62923]: INFO nova.compute.manager [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Took 17.47 seconds to spawn the instance on the hypervisor. [ 941.671738] env[62923]: DEBUG nova.compute.manager [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 941.672713] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4992e2-4246-41f6-9a0c-770b26296c85 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.677919] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 941.677919] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c77e0e-2bc9-93c8-3b4c-76471cdf27d1" [ 941.677919] env[62923]: _type = "Task" [ 941.677919] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.689210] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c77e0e-2bc9-93c8-3b4c-76471cdf27d1, 'name': SearchDatastore_Task, 'duration_secs': 0.007215} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.689978] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-075cabfd-1996-4dbe-9e13-e2dfe7f59ca2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.694429] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 941.694429] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]529cdbd9-b962-91d9-c9c9-9828683aeaf2" [ 941.694429] env[62923]: _type = "Task" [ 941.694429] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.701461] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]529cdbd9-b962-91d9-c9c9-9828683aeaf2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.782838] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370204, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059766} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.784030] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 941.784030] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e6ed76-452b-4a57-95a3-7aa44cd65d3e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.806872] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] eaa654f9-023d-4514-930d-6bebd421325a/eaa654f9-023d-4514-930d-6bebd421325a.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 941.807224] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-530458b9-c1f2-4747-9dd6-7c0c32e2e24f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.825982] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 941.825982] env[62923]: value = "task-1370207" [ 941.825982] env[62923]: _type = "Task" [ 941.825982] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.829556] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "refresh_cache-4de1c06d-3261-4447-b5bc-a21a91f7a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.834830] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370207, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.900096] env[62923]: DEBUG nova.network.neutron [req-9383ef82-a324-4893-81f8-521fe92f507b req-fec8b996-bb2d-4a23-b1fd-23ff4f3eb807 service nova] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 941.997995] env[62923]: DEBUG nova.network.neutron [req-9383ef82-a324-4893-81f8-521fe92f507b req-fec8b996-bb2d-4a23-b1fd-23ff4f3eb807 service nova] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.025113] env[62923]: DEBUG oslo_concurrency.lockutils [req-05972a5b-b6f3-42a3-a024-c7b48cf93f06 req-bdd00536-9cb3-4474-88b3-00701db7a16e service nova] Releasing lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.142687] env[62923]: DEBUG oslo_vmware.api [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370206, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.408802} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.143110] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 942.143224] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 942.143323] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 942.143506] env[62923]: INFO nova.compute.manager [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 942.143749] env[62923]: DEBUG oslo.service.loopingcall [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.143943] env[62923]: DEBUG nova.compute.manager [-] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 942.144045] env[62923]: DEBUG nova.network.neutron [-] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 942.157691] env[62923]: DEBUG oslo_vmware.api [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370192, 'name': ReconfigVM_Task, 'duration_secs': 5.905361} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.157941] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.158178] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Reconfigured VM to detach interface {{(pid=62923) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 942.192856] env[62923]: INFO nova.compute.manager [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Took 25.68 seconds to build instance. [ 942.205356] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]529cdbd9-b962-91d9-c9c9-9828683aeaf2, 'name': SearchDatastore_Task, 'duration_secs': 0.023826} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.205356] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.205582] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 2a9a93f8-9398-4a19-a149-a1092ceb416d/2a9a93f8-9398-4a19-a149-a1092ceb416d.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 942.206250] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4bf9750-66cf-45d0-93f7-a9bcdea451d9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.214087] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 942.214087] env[62923]: value = "task-1370208" [ 942.214087] env[62923]: _type = "Task" [ 942.214087] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.224410] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370208, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.336446] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370207, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.501543] env[62923]: DEBUG oslo_concurrency.lockutils [req-9383ef82-a324-4893-81f8-521fe92f507b req-fec8b996-bb2d-4a23-b1fd-23ff4f3eb807 service nova] Releasing lock "refresh_cache-4de1c06d-3261-4447-b5bc-a21a91f7a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.501902] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "refresh_cache-4de1c06d-3261-4447-b5bc-a21a91f7a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.502076] env[62923]: DEBUG nova.network.neutron [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 942.695603] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a05ed8c-7825-4969-aa22-24654481f6d8 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "41cc788d-9be8-4959-9cef-d91304f5879d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.198s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.724298] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370208, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.797813] env[62923]: DEBUG nova.compute.manager [req-67af149c-79e2-4842-a680-138d5487ba4b req-f9741694-450c-4430-802b-16e0bfbc5e75 service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Received event network-changed-bed590d2-cf12-4135-a164-a61cade082eb {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 942.798036] env[62923]: DEBUG nova.compute.manager [req-67af149c-79e2-4842-a680-138d5487ba4b req-f9741694-450c-4430-802b-16e0bfbc5e75 service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Refreshing instance network info cache due to event network-changed-bed590d2-cf12-4135-a164-a61cade082eb. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 942.798265] env[62923]: DEBUG oslo_concurrency.lockutils [req-67af149c-79e2-4842-a680-138d5487ba4b req-f9741694-450c-4430-802b-16e0bfbc5e75 service nova] Acquiring lock "refresh_cache-b145b71c-c56b-4872-bb61-fa3e65fef04f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.798414] env[62923]: DEBUG oslo_concurrency.lockutils [req-67af149c-79e2-4842-a680-138d5487ba4b req-f9741694-450c-4430-802b-16e0bfbc5e75 service nova] Acquired lock "refresh_cache-b145b71c-c56b-4872-bb61-fa3e65fef04f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.798579] env[62923]: DEBUG nova.network.neutron [req-67af149c-79e2-4842-a680-138d5487ba4b req-f9741694-450c-4430-802b-16e0bfbc5e75 service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Refreshing network info cache for port bed590d2-cf12-4135-a164-a61cade082eb {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 942.840841] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370207, 'name': ReconfigVM_Task, 'duration_secs': 0.655751} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.841159] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Reconfigured VM instance instance-00000059 to attach disk [datastore1] eaa654f9-023d-4514-930d-6bebd421325a/eaa654f9-023d-4514-930d-6bebd421325a.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 942.843187] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c0430f8-223d-4d30-aeac-2e3eb55a4c4b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.850580] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 942.850580] env[62923]: value = "task-1370209" [ 942.850580] env[62923]: _type = "Task" [ 942.850580] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.860642] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370209, 'name': Rename_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.871056] env[62923]: DEBUG nova.network.neutron [-] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.041429] env[62923]: DEBUG nova.network.neutron [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 943.225146] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370208, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.920923} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.225447] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 2a9a93f8-9398-4a19-a149-a1092ceb416d/2a9a93f8-9398-4a19-a149-a1092ceb416d.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 943.225734] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 943.226046] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c270520b-60ee-4c37-b901-5c8946f6acfa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.232163] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 943.232163] env[62923]: value = "task-1370210" [ 943.232163] env[62923]: _type = "Task" [ 943.232163] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.237745] env[62923]: DEBUG nova.network.neutron [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Updating instance_info_cache with network_info: [{"id": "f0a69691-e627-471b-bf2c-1705c8a4f373", "address": "fa:16:3e:61:5a:f6", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0a69691-e6", "ovs_interfaceid": "f0a69691-e627-471b-bf2c-1705c8a4f373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.242157] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370210, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.335138] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "41cc788d-9be8-4959-9cef-d91304f5879d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.335389] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "41cc788d-9be8-4959-9cef-d91304f5879d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.335618] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "41cc788d-9be8-4959-9cef-d91304f5879d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.336244] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "41cc788d-9be8-4959-9cef-d91304f5879d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.336430] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "41cc788d-9be8-4959-9cef-d91304f5879d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.338773] env[62923]: INFO nova.compute.manager [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Terminating instance [ 943.340450] env[62923]: DEBUG nova.compute.manager [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 943.340644] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 943.341493] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8017202e-8c75-42b3-adc5-f86af5e7dfaf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.348806] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.349043] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f175ce0-3349-4064-bc8a-8d6287ab1ac6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.355866] env[62923]: DEBUG oslo_vmware.api [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 943.355866] env[62923]: value = "task-1370211" [ 943.355866] env[62923]: _type = "Task" [ 943.355866] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.362371] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370209, 'name': Rename_Task, 'duration_secs': 0.192366} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.362956] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 943.363226] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb9a4ad7-822d-4a49-a0f6-c75662a2edcb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.367595] env[62923]: DEBUG oslo_vmware.api [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370211, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.372702] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 943.372702] env[62923]: value = "task-1370212" [ 943.372702] env[62923]: _type = "Task" [ 943.372702] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.373142] env[62923]: INFO nova.compute.manager [-] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Took 1.23 seconds to deallocate network for instance. [ 943.387331] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370212, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.466478] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.467090] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.467090] env[62923]: DEBUG nova.network.neutron [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.539219] env[62923]: DEBUG nova.network.neutron [req-67af149c-79e2-4842-a680-138d5487ba4b req-f9741694-450c-4430-802b-16e0bfbc5e75 service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Updated VIF entry in instance network info cache for port bed590d2-cf12-4135-a164-a61cade082eb. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 943.539660] env[62923]: DEBUG nova.network.neutron [req-67af149c-79e2-4842-a680-138d5487ba4b req-f9741694-450c-4430-802b-16e0bfbc5e75 service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Updating instance_info_cache with network_info: [{"id": "bed590d2-cf12-4135-a164-a61cade082eb", "address": "fa:16:3e:84:e6:01", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed590d2-cf", "ovs_interfaceid": "bed590d2-cf12-4135-a164-a61cade082eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.741800] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065814} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.742083] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 943.742576] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "refresh_cache-4de1c06d-3261-4447-b5bc-a21a91f7a812" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.742829] env[62923]: DEBUG nova.compute.manager [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Instance network_info: |[{"id": "f0a69691-e627-471b-bf2c-1705c8a4f373", "address": "fa:16:3e:61:5a:f6", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0a69691-e6", "ovs_interfaceid": "f0a69691-e627-471b-bf2c-1705c8a4f373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 943.743617] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f412c71-cf34-4e21-bd9d-3ea3a8d49b36 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.746185] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:5a:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '365ac5b1-6d83-4dfe-887f-60574d7f6124', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0a69691-e627-471b-bf2c-1705c8a4f373', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 943.753407] env[62923]: DEBUG oslo.service.loopingcall [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 943.753684] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 943.754492] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5478438f-7dd4-422e-8707-858d4f3e35b6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.785917] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 2a9a93f8-9398-4a19-a149-a1092ceb416d/2a9a93f8-9398-4a19-a149-a1092ceb416d.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 943.786778] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1cf2130-30f3-4b9b-bb22-0da3070421f8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.801760] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 943.801760] env[62923]: value = "task-1370213" [ 943.801760] env[62923]: _type = "Task" [ 943.801760] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.807444] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 943.807444] env[62923]: value = "task-1370214" [ 943.807444] env[62923]: _type = "Task" [ 943.807444] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.813592] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370213, 'name': CreateVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.829878] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370214, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.866584] env[62923]: DEBUG oslo_vmware.api [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370211, 'name': PowerOffVM_Task, 'duration_secs': 0.20153} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.866854] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 943.867035] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 943.867321] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87677209-1642-4e22-9477-ec8897a5b04a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.882026] env[62923]: DEBUG oslo_vmware.api [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370212, 'name': PowerOnVM_Task, 'duration_secs': 0.4966} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.882026] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.882026] env[62923]: INFO nova.compute.manager [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Took 8.28 seconds to spawn the instance on the hypervisor. 
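The PowerOffVM_Task, PowerOnVM_Task and ExtendVirtualDisk_Task records above all follow oslo.vmware's invoke-then-poll pattern: the service layer issues a *_Task SOAP call, and wait_for_task polls the task object until it finishes, producing the "Waiting for the task", "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern, with placeholder vCenter credentials rather than anything taken from this log:

    # Minimal sketch (not Nova's code) of the invoke-then-poll pattern
    # behind the "Invoking <X>_Task" / "progress is N%" records above.
    # Host and credentials are placeholders.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.invalid', 'user', 'secret',  # placeholder endpoint
        api_retry_count=10,
        task_poll_interval=0.5)  # _poll_task wakes at this interval

    vm_ref = ...  # a VirtualMachine managed-object reference, looked up elsewhere
    # invoke_api() issues the SOAP request and returns a Task moref;
    # wait_for_task() then polls Task.info until success, or raises on error.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)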
[ 943.882302] env[62923]: DEBUG nova.compute.manager [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 943.883029] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e3f0fe-3881-4143-a6f7-e5c8e3c4727c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.889665] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.891243] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.891243] env[62923]: DEBUG nova.objects.instance [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lazy-loading 'resources' on Instance uuid 0a9fdd83-3818-4831-90f9-9d30713961c5 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 943.948631] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 943.948874] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 943.949070] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleting the datastore file [datastore2] 41cc788d-9be8-4959-9cef-d91304f5879d {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 943.949440] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd98d1f1-6939-4688-bc5f-1746919e36cd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.956353] env[62923]: DEBUG oslo_vmware.api [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 943.956353] env[62923]: value = "task-1370216" [ 943.956353] env[62923]: _type = "Task" [ 943.956353] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.964277] env[62923]: DEBUG oslo_vmware.api [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370216, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.042718] env[62923]: DEBUG oslo_concurrency.lockutils [req-67af149c-79e2-4842-a680-138d5487ba4b req-f9741694-450c-4430-802b-16e0bfbc5e75 service nova] Releasing lock "refresh_cache-b145b71c-c56b-4872-bb61-fa3e65fef04f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.043015] env[62923]: DEBUG nova.compute.manager [req-67af149c-79e2-4842-a680-138d5487ba4b req-f9741694-450c-4430-802b-16e0bfbc5e75 service nova] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Received event network-vif-deleted-69601284-7be9-4b00-9fde-93089f7b51c8 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 944.043201] env[62923]: INFO nova.compute.manager [req-67af149c-79e2-4842-a680-138d5487ba4b req-f9741694-450c-4430-802b-16e0bfbc5e75 service nova] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Neutron deleted interface 69601284-7be9-4b00-9fde-93089f7b51c8; detaching it from the instance and deleting it from the info cache [ 944.043457] env[62923]: DEBUG nova.network.neutron [req-67af149c-79e2-4842-a680-138d5487ba4b req-f9741694-450c-4430-802b-16e0bfbc5e75 service nova] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.200762] env[62923]: INFO nova.network.neutron [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Port 1353213d-e2e0-4537-a849-37be48c686ac from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
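The paired "Acquiring lock ... by ..." / "... acquired ... waited Ns" / "... released ... held Ns" records throughout this stretch come from oslo.concurrency's synchronized decorator, whose inner() wrapper logs at lockutils.py:402, 407 and 421. A minimal sketch of that lock lifecycle, with an illustrative lock name and function rather than Nova's actual code:

    # Minimal sketch of the lock lifecycle logged above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs with the named lock held; the decorator's inner() wrapper
        # logs how long the caller waited and how long the lock was held.
        pass

    update_usage()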
[ 944.201291] env[62923]: DEBUG nova.network.neutron [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updating instance_info_cache with network_info: [{"id": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "address": "fa:16:3e:59:5e:e2", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcea0b4ee-b6", "ovs_interfaceid": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.313394] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370213, 'name': CreateVM_Task} progress is 25%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.321209] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370214, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.408294] env[62923]: INFO nova.compute.manager [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Took 17.55 seconds to build instance. [ 944.467045] env[62923]: DEBUG oslo_vmware.api [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370216, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.550482] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28424e7a-ad39-4dfb-ba40-4cc41c12921d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.561792] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9d5775-d55b-4221-a372-a6580f51602d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.599300] env[62923]: DEBUG nova.compute.manager [req-67af149c-79e2-4842-a680-138d5487ba4b req-f9741694-450c-4430-802b-16e0bfbc5e75 service nova] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Detach interface failed, port_id=69601284-7be9-4b00-9fde-93089f7b51c8, reason: Instance 0a9fdd83-3818-4831-90f9-9d30713961c5 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 944.661060] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7de97a-1837-4c39-9f7f-55504e2d5eba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.670444] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2693fbec-c923-4921-8b50-4d1edf835d8e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.703361] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c11a316-66ca-4f76-9b4c-76a35a3e200f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.706420] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.714359] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438c330b-3ea4-4c09-b8ca-83ad595f9470 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.730051] env[62923]: DEBUG nova.compute.provider_tree [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.813567] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370213, 'name': CreateVM_Task, 'duration_secs': 0.746664} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.818551] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 944.818551] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.818551] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.818551] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 944.819106] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9949b609-5e77-4a50-a624-61d92fcc6704 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.823531] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370214, 'name': ReconfigVM_Task, 'duration_secs': 0.965171} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.824163] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 2a9a93f8-9398-4a19-a149-a1092ceb416d/2a9a93f8-9398-4a19-a149-a1092ceb416d.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.825665] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a7cf98d-08a2-4aa5-affa-67c025d1b4c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.828756] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 944.828756] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5209a4e2-c6b4-dfc1-9638-5f8a20488fb4" [ 944.828756] env[62923]: _type = "Task" [ 944.828756] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.833036] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 944.833036] env[62923]: value = "task-1370217" [ 944.833036] env[62923]: _type = "Task" [ 944.833036] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.839706] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5209a4e2-c6b4-dfc1-9638-5f8a20488fb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.842210] env[62923]: DEBUG nova.compute.manager [req-9457d74f-0ea9-45c6-a215-68baed26062a req-27555596-f01f-4011-bf3d-751607ed4438 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Received event network-changed-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 944.842397] env[62923]: DEBUG nova.compute.manager [req-9457d74f-0ea9-45c6-a215-68baed26062a req-27555596-f01f-4011-bf3d-751607ed4438 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Refreshing instance network info cache due to event network-changed-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 944.842654] env[62923]: DEBUG oslo_concurrency.lockutils [req-9457d74f-0ea9-45c6-a215-68baed26062a req-27555596-f01f-4011-bf3d-751607ed4438 service nova] Acquiring lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.842841] env[62923]: DEBUG oslo_concurrency.lockutils [req-9457d74f-0ea9-45c6-a215-68baed26062a req-27555596-f01f-4011-bf3d-751607ed4438 service nova] Acquired lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.843016] env[62923]: DEBUG nova.network.neutron [req-9457d74f-0ea9-45c6-a215-68baed26062a req-27555596-f01f-4011-bf3d-751607ed4438 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Refreshing network info cache for port cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 944.850218] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370217, 'name': Rename_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.911145] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16f4c817-b2da-499d-8bce-9160e0126953 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "eaa654f9-023d-4514-930d-6bebd421325a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 19.068s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.967058] env[62923]: DEBUG oslo_vmware.api [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370216, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.514912} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.967311] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 944.967507] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 944.967692] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 944.967887] env[62923]: INFO nova.compute.manager [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Took 1.63 seconds to destroy the instance on the hypervisor. [ 944.968190] env[62923]: DEBUG oslo.service.loopingcall [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.968404] env[62923]: DEBUG nova.compute.manager [-] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 944.968501] env[62923]: DEBUG nova.network.neutron [-] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 945.144044] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6793a995-54e2-40ea-96d8-1cf75d5f1426 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "eaa654f9-023d-4514-930d-6bebd421325a" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.146232] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6793a995-54e2-40ea-96d8-1cf75d5f1426 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "eaa654f9-023d-4514-930d-6bebd421325a" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.146232] env[62923]: DEBUG nova.compute.manager [None req-6793a995-54e2-40ea-96d8-1cf75d5f1426 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 945.146630] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b91d0d2-222d-4fab-98b3-0234dfd385ed {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.156023] env[62923]: DEBUG nova.compute.manager [None req-6793a995-54e2-40ea-96d8-1cf75d5f1426 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62923) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 945.156023] env[62923]: DEBUG nova.objects.instance [None req-6793a995-54e2-40ea-96d8-1cf75d5f1426 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lazy-loading 'flavor' on Instance uuid eaa654f9-023d-4514-930d-6bebd421325a {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.210616] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e5bfd867-6385-40f0-9633-2a7322352bd7 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "interface-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-1353213d-e2e0-4537-a849-37be48c686ac" "released" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: held 9.653s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.233838] env[62923]: DEBUG nova.scheduler.client.report [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000
tempest-ServersTestJSON-56933000-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 945.293422] env[62923]: DEBUG nova.compute.manager [req-670f49fb-0bc2-491c-bf00-ee4c3cb280dc req-bd98eb79-bb18-436f-bd86-9b968a94d73b service nova] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Received event network-vif-deleted-24cd4887-ac99-48ba-bf0a-e5077d525b6c {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.293422] env[62923]: INFO nova.compute.manager [req-670f49fb-0bc2-491c-bf00-ee4c3cb280dc req-bd98eb79-bb18-436f-bd86-9b968a94d73b service nova] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Neutron deleted interface 24cd4887-ac99-48ba-bf0a-e5077d525b6c; detaching it from the instance and deleting it from the info cache [ 945.293645] env[62923]: DEBUG nova.network.neutron [req-670f49fb-0bc2-491c-bf00-ee4c3cb280dc req-bd98eb79-bb18-436f-bd86-9b968a94d73b service nova] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.340293] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5209a4e2-c6b4-dfc1-9638-5f8a20488fb4, 'name': SearchDatastore_Task, 'duration_secs': 0.012921} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.340936] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.341213] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 945.341454] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.341601] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.341785] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 945.342093] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7bb30905-5d60-44c7-a682-b70ae22a5f11 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.346489] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370217, 'name': Rename_Task, 'duration_secs': 0.347471} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.347054] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.347317] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3fbfca6-379e-408e-bb00-505777cfc885 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.353847] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 945.353847] env[62923]: value = "task-1370218" [ 945.353847] env[62923]: _type = "Task" [ 945.353847] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.358166] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 945.358352] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 945.359336] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54470db9-e6a7-4c3b-9b54-29ea55bdac51 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.365288] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370218, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.368237] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 945.368237] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527bafd9-f0a0-d614-2205-955a50deda32" [ 945.368237] env[62923]: _type = "Task" [ 945.368237] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.377012] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527bafd9-f0a0-d614-2205-955a50deda32, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.535427] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "interface-f52f5912-d6e8-4da5-ac39-65bb065b6555-1353213d-e2e0-4537-a849-37be48c686ac" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.535712] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "interface-f52f5912-d6e8-4da5-ac39-65bb065b6555-1353213d-e2e0-4537-a849-37be48c686ac" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.536091] env[62923]: DEBUG nova.objects.instance [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lazy-loading 'flavor' on Instance uuid f52f5912-d6e8-4da5-ac39-65bb065b6555 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.569179] env[62923]: DEBUG nova.network.neutron [req-9457d74f-0ea9-45c6-a215-68baed26062a req-27555596-f01f-4011-bf3d-751607ed4438 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updated VIF entry in instance network info cache for port cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 945.569477] env[62923]: DEBUG nova.network.neutron [req-9457d74f-0ea9-45c6-a215-68baed26062a req-27555596-f01f-4011-bf3d-751607ed4438 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updating instance_info_cache with network_info: [{"id": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "address": "fa:16:3e:59:5e:e2", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcea0b4ee-b6", "ovs_interfaceid": "cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.660543] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-6793a995-54e2-40ea-96d8-1cf75d5f1426 tempest-ServerActionsTestOtherA-777156528
tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.660808] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4cf5a27-dc22-4945-ad28-b5b2f95a6c1d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.668007] env[62923]: DEBUG oslo_vmware.api [None req-6793a995-54e2-40ea-96d8-1cf75d5f1426 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 945.668007] env[62923]: value = "task-1370219" [ 945.668007] env[62923]: _type = "Task" [ 945.668007] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.676394] env[62923]: DEBUG oslo_vmware.api [None req-6793a995-54e2-40ea-96d8-1cf75d5f1426 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370219, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.742567] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.767190] env[62923]: DEBUG nova.network.neutron [-] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.771236] env[62923]: INFO nova.scheduler.client.report [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleted allocations for instance 0a9fdd83-3818-4831-90f9-9d30713961c5 [ 945.798692] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f573e65-6cee-4bb7-acd2-68868cbd1d7f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.810487] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3995741b-375b-402f-8da8-d1c707230ded {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.852563] env[62923]: DEBUG nova.compute.manager [req-670f49fb-0bc2-491c-bf00-ee4c3cb280dc req-bd98eb79-bb18-436f-bd86-9b968a94d73b service nova] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Detach interface failed, port_id=24cd4887-ac99-48ba-bf0a-e5077d525b6c, reason: Instance 41cc788d-9be8-4959-9cef-d91304f5879d could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 945.864992] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370218, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.883020] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527bafd9-f0a0-d614-2205-955a50deda32, 'name': SearchDatastore_Task, 'duration_secs': 0.020119} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.883020] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80f4cb13-69f6-4a32-ba9f-700546cacab2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.887659] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 945.887659] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52800017-7b70-ea7a-22d3-89cd0d42c96b" [ 945.887659] env[62923]: _type = "Task" [ 945.887659] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.897273] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52800017-7b70-ea7a-22d3-89cd0d42c96b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.074247] env[62923]: DEBUG oslo_concurrency.lockutils [req-9457d74f-0ea9-45c6-a215-68baed26062a req-27555596-f01f-4011-bf3d-751607ed4438 service nova] Releasing lock "refresh_cache-8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.171055] env[62923]: DEBUG nova.objects.instance [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lazy-loading 'pci_requests' on Instance uuid f52f5912-d6e8-4da5-ac39-65bb065b6555 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.181526] env[62923]: DEBUG oslo_vmware.api [None req-6793a995-54e2-40ea-96d8-1cf75d5f1426 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370219, 'name': PowerOffVM_Task, 'duration_secs': 0.208896} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.181776] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-6793a995-54e2-40ea-96d8-1cf75d5f1426 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 946.181966] env[62923]: DEBUG nova.compute.manager [None req-6793a995-54e2-40ea-96d8-1cf75d5f1426 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 946.182826] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfc7552-6a11-45f3-ba78-040e1d4ec277 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.270753] env[62923]: INFO nova.compute.manager [-] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Took 1.30 seconds to deallocate network for instance. [ 946.278404] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a6252c8-5008-4227-860f-a380bfe298c0 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "0a9fdd83-3818-4831-90f9-9d30713961c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.257s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.364525] env[62923]: DEBUG oslo_vmware.api [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370218, 'name': PowerOnVM_Task, 'duration_secs': 0.754904} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.364894] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.365009] env[62923]: INFO nova.compute.manager [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Took 8.33 seconds to spawn the instance on the hypervisor.
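Each "Checking state" record that follows a power operation corresponds to a single-property read through the vSphere PropertyCollector: Nova fetches the VM's runtime.powerState to confirm the result. A sketch of that read, reusing the placeholder session and vm_ref from the earlier sketch:

    # Minimal sketch of a single-property read via the PropertyCollector,
    # the call logged as "Invoking PropertyCollector.RetrievePropertiesEx".
    # `session` and `vm_ref` are the placeholders from the earlier sketch.
    from oslo_vmware import vim_util

    power_state = session.invoke_api(
        vim_util, 'get_object_property', session.vim,
        vm_ref, 'runtime.powerState')
    # Returns e.g. 'poweredOn' or 'poweredOff'; _get_power_state maps these
    # to Nova's integer power states (1 == RUNNING in the records above).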
[ 946.365197] env[62923]: DEBUG nova.compute.manager [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 946.366018] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9e1987-cc08-4b0a-83a0-7193b5a4bf1d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.398974] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52800017-7b70-ea7a-22d3-89cd0d42c96b, 'name': SearchDatastore_Task, 'duration_secs': 0.010662} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.399718] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.400718] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 4de1c06d-3261-4447-b5bc-a21a91f7a812/4de1c06d-3261-4447-b5bc-a21a91f7a812.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 946.400718] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-818df78d-4d4d-4ed3-9229-02fc24348650 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.408846] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 946.408846] env[62923]: value = "task-1370220" [ 946.408846] env[62923]: _type = "Task" [ 946.408846] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.419108] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370220, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.676212] env[62923]: DEBUG nova.objects.base [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 946.676425] env[62923]: DEBUG nova.network.neutron [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 946.695196] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6793a995-54e2-40ea-96d8-1cf75d5f1426 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "eaa654f9-023d-4514-930d-6bebd421325a" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 1.551s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.729339] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.730022] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.760915] env[62923]: DEBUG nova.policy [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c37debff078b4389813658cbad297e65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0db41047d1004a1d9ca7f663178058da', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 946.779384] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.779850] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" acquired by
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.780138] env[62923]: DEBUG nova.objects.instance [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lazy-loading 'resources' on Instance uuid 41cc788d-9be8-4959-9cef-d91304f5879d {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.886578] env[62923]: INFO nova.compute.manager [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Took 13.25 seconds to build instance. [ 946.924169] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370220, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.209996] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "2d7bceb2-450c-4747-bedb-aa9848450ca9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.210224] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "2d7bceb2-450c-4747-bedb-aa9848450ca9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.233681] env[62923]: DEBUG nova.compute.utils [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 947.390498] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88edb98a-4f8c-423c-bf85-60048bf4edef tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "2a9a93f8-9398-4a19-a149-a1092ceb416d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.766s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.420054] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370220, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58319} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.420331] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 4de1c06d-3261-4447-b5bc-a21a91f7a812/4de1c06d-3261-4447-b5bc-a21a91f7a812.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 947.420561] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 947.420814] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-91e01472-447d-4ec4-9825-af67ad76a6d8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.427363] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 947.427363] env[62923]: value = "task-1370221" [ 947.427363] env[62923]: _type = "Task" [ 947.427363] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.436922] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370221, 'name': ExtendVirtualDisk_Task} progress is 0%. 
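The wait_for_task / _poll_task records around this point trace oslo.vmware's task-polling loop: Nova invokes a vSphere *_Task method, then blocks while the session polls task progress until success or error. A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials and datastore path below are placeholders, not values from this log:

# Sketch of the oslo.vmware polling pattern behind the records above.
# Endpoint, credentials and the vmdk path are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',   # placeholder endpoint/creds
    api_retry_count=10,
    task_poll_interval=0.5)               # seconds between polls

# Any *_Task invocation returns a task moref; ExtendVirtualDisk_Task is
# the call logged here. (A datacenter argument may also be required
# depending on how the disk path is expressed -- omitted in this sketch.)
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                          name='[datastore1] example/example.vmdk',
                          newCapacityKb=1048576, eagerZero=False)

# wait_for_task drives the _poll_task loop seen in the log: it re-reads
# task.info until the state is 'success' (returning the task info) or
# raises if the task errors out.
task_info = session.wait_for_task(task)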
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.475195] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34b89e7-ed2d-446f-aff4-3c259b9649ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.482388] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292cc47f-147a-410a-9f81-4eb50e91cacb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.511643] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b60fab9-88c7-474c-a5f7-4491056402ef {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.518916] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22dfc4e-0f58-4f9b-a775-65e773f2c4fb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.532811] env[62923]: DEBUG nova.compute.provider_tree [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.712018] env[62923]: DEBUG nova.compute.manager [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 947.736072] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.937627] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370221, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065688} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.937952] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 947.938770] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9f510f-926a-427b-b1b9-82d023afa674 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.960227] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 4de1c06d-3261-4447-b5bc-a21a91f7a812/4de1c06d-3261-4447-b5bc-a21a91f7a812.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 947.960503] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1107c81f-3d64-458a-a997-f43a5cbbc915 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.980997] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 947.980997] env[62923]: value = "task-1370222" [ 947.980997] env[62923]: _type = "Task" [ 947.980997] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.988702] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370222, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.035408] env[62923]: DEBUG nova.scheduler.client.report [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 948.230869] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.260241] env[62923]: DEBUG nova.network.neutron [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Successfully updated port: 1353213d-e2e0-4537-a849-37be48c686ac {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 948.401110] env[62923]: DEBUG nova.compute.manager [req-1cdc9cef-08bc-4a35-a235-9a9cf2959e67 req-b8335ba8-0d64-4b83-9a79-995619335b7a service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Received event network-changed-3adfc18d-e45f-4eb0-8019-d5531853f63f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 948.401349] env[62923]: DEBUG nova.compute.manager [req-1cdc9cef-08bc-4a35-a235-9a9cf2959e67 req-b8335ba8-0d64-4b83-9a79-995619335b7a service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Refreshing instance network info cache due to event network-changed-3adfc18d-e45f-4eb0-8019-d5531853f63f. 
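The inventory payload in the scheduler report record above is what placement schedules against. As a worked check of those numbers, assuming placement's usual capacity rule, capacity = (total - reserved) * allocation_ratio:

# Worked check of the inventory record above; the capacity formula is
# placement's usual rule and is stated here as an assumption.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0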
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 948.401592] env[62923]: DEBUG oslo_concurrency.lockutils [req-1cdc9cef-08bc-4a35-a235-9a9cf2959e67 req-b8335ba8-0d64-4b83-9a79-995619335b7a service nova] Acquiring lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.401765] env[62923]: DEBUG oslo_concurrency.lockutils [req-1cdc9cef-08bc-4a35-a235-9a9cf2959e67 req-b8335ba8-0d64-4b83-9a79-995619335b7a service nova] Acquired lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.401928] env[62923]: DEBUG nova.network.neutron [req-1cdc9cef-08bc-4a35-a235-9a9cf2959e67 req-b8335ba8-0d64-4b83-9a79-995619335b7a service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Refreshing network info cache for port 3adfc18d-e45f-4eb0-8019-d5531853f63f {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 948.491545] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370222, 'name': ReconfigVM_Task, 'duration_secs': 0.368523} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.491917] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 4de1c06d-3261-4447-b5bc-a21a91f7a812/4de1c06d-3261-4447-b5bc-a21a91f7a812.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.492616] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d64a4a23-2b6c-4d2e-9634-c0c5a82b638d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.499010] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 948.499010] env[62923]: value = "task-1370223" [ 948.499010] env[62923]: _type = "Task" [ 948.499010] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.507909] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370223, 'name': Rename_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.540367] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.761s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.542581] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.312s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.544082] env[62923]: INFO nova.compute.claims [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 948.558889] env[62923]: INFO nova.scheduler.client.report [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleted allocations for instance 41cc788d-9be8-4959-9cef-d91304f5879d [ 948.684581] env[62923]: DEBUG nova.compute.manager [req-942e64a0-0c70-4fbd-9415-991e28d6c5b2 req-2d1eaee2-8ac7-4354-95f0-6008b55a5659 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Received event network-vif-plugged-1353213d-e2e0-4537-a849-37be48c686ac {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 948.684812] env[62923]: DEBUG oslo_concurrency.lockutils [req-942e64a0-0c70-4fbd-9415-991e28d6c5b2 req-2d1eaee2-8ac7-4354-95f0-6008b55a5659 service nova] Acquiring lock "f52f5912-d6e8-4da5-ac39-65bb065b6555-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.685037] env[62923]: DEBUG oslo_concurrency.lockutils [req-942e64a0-0c70-4fbd-9415-991e28d6c5b2 req-2d1eaee2-8ac7-4354-95f0-6008b55a5659 service nova] Lock "f52f5912-d6e8-4da5-ac39-65bb065b6555-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.685221] env[62923]: DEBUG oslo_concurrency.lockutils [req-942e64a0-0c70-4fbd-9415-991e28d6c5b2 req-2d1eaee2-8ac7-4354-95f0-6008b55a5659 service nova] Lock "f52f5912-d6e8-4da5-ac39-65bb065b6555-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.685390] env[62923]: DEBUG nova.compute.manager [req-942e64a0-0c70-4fbd-9415-991e28d6c5b2 req-2d1eaee2-8ac7-4354-95f0-6008b55a5659 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] No waiting events found dispatching network-vif-plugged-1353213d-e2e0-4537-a849-37be48c686ac {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 948.685603] env[62923]: WARNING nova.compute.manager [req-942e64a0-0c70-4fbd-9415-991e28d6c5b2 
req-2d1eaee2-8ac7-4354-95f0-6008b55a5659 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Received unexpected event network-vif-plugged-1353213d-e2e0-4537-a849-37be48c686ac for instance with vm_state active and task_state None. [ 948.763473] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.797831] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.798119] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.798357] env[62923]: INFO nova.compute.manager [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Attaching volume f98451ba-8f2b-4010-bb20-e6959423a29c to /dev/sdb [ 948.841565] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ebc978-12bf-4491-b669-ab2d95f9dccb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.849295] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a68b93d-48b9-41aa-ac0b-e643b5753592 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.862884] env[62923]: DEBUG nova.virt.block_device [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Updating existing volume attachment record: f7534955-27d1-4b82-8561-46fd9e1a17fe {{(pid=62923) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 949.008711] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370223, 'name': Rename_Task} progress is 99%. 
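The "Acquiring lock ... by ..." / "acquired ... waited" / "released ... held" triples throughout this section are emitted by oslo.concurrency's lockutils, which Nova uses for its per-instance, per-event and resource-tracker critical sections. A minimal sketch of the underlying primitive; the names here are illustrative, not Nova's own wrappers:

# Sketch of the oslo.concurrency primitive behind the lock records above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # At most one thread in this process runs this body at a time;
    # lockutils logs the acquire/wait/held timings seen in this section.
    pass

# The same lock is available as a context manager, as used for the
# "refresh_cache-<uuid>" locks around network info cache refreshes:
with lockutils.lock('refresh_cache-example-uuid'):
    pass  # refresh the cache while holding the lock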
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.066183] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d950181a-f32d-4677-8355-456e78e89838 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "41cc788d-9be8-4959-9cef-d91304f5879d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.731s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.137944] env[62923]: DEBUG nova.network.neutron [req-1cdc9cef-08bc-4a35-a235-9a9cf2959e67 req-b8335ba8-0d64-4b83-9a79-995619335b7a service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updated VIF entry in instance network info cache for port 3adfc18d-e45f-4eb0-8019-d5531853f63f. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 949.138366] env[62923]: DEBUG nova.network.neutron [req-1cdc9cef-08bc-4a35-a235-9a9cf2959e67 req-b8335ba8-0d64-4b83-9a79-995619335b7a service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updating instance_info_cache with network_info: [{"id": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "address": "fa:16:3e:bf:9d:f5", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3adfc18d-e4", "ovs_interfaceid": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.331654] env[62923]: INFO nova.compute.manager [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Rebuilding instance [ 949.378155] env[62923]: DEBUG nova.compute.manager [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 949.379034] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9c8d70-651c-48a4-a9c8-040cc3678f94 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.509745] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 
tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370223, 'name': Rename_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.641978] env[62923]: DEBUG oslo_concurrency.lockutils [req-1cdc9cef-08bc-4a35-a235-9a9cf2959e67 req-b8335ba8-0d64-4b83-9a79-995619335b7a service nova] Releasing lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.642436] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.642626] env[62923]: DEBUG nova.network.neutron [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.721733] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61de0a17-b368-45c7-9d24-4f5c0a815235 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.730706] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544987b4-0b00-4e66-a5f2-8acc3111de04 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.762358] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821e2100-c05a-4cf0-b884-cce6dc6cad5e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.771261] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e24c190-7dc8-4d59-8108-254bdacb584a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.796971] env[62923]: DEBUG nova.compute.provider_tree [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.890428] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 949.890724] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-96ed3416-d124-4039-a639-17a4613620d9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.898227] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 
tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 949.898227] env[62923]: value = "task-1370227" [ 949.898227] env[62923]: _type = "Task" [ 949.898227] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.907433] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] VM already powered off {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 949.907671] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 949.908465] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93591fd-0f3f-47f7-b595-8c305edc687e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.915053] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 949.915304] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dca39e0a-9eee-40b8-accf-32487d193f75 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.982651] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 949.982924] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 949.982924] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleting the datastore file [datastore1] eaa654f9-023d-4514-930d-6bebd421325a {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 949.983921] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2d20cb5-24db-499f-aa18-94103de7c83c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.990059] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 
949.990059] env[62923]: value = "task-1370229" [ 949.990059] env[62923]: _type = "Task" [ 949.990059] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.998852] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370229, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.009259] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370223, 'name': Rename_Task, 'duration_secs': 1.261384} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.009523] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 950.009821] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c5371ad-ef80-4de1-901e-89b5dcf2d79d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.015639] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 950.015639] env[62923]: value = "task-1370230" [ 950.015639] env[62923]: _type = "Task" [ 950.015639] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.022966] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370230, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.183310] env[62923]: WARNING nova.network.neutron [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] dc0481d3-aa80-48c6-bea8-294b2d1f77ec already exists in list: networks containing: ['dc0481d3-aa80-48c6-bea8-294b2d1f77ec']. 
ignoring it [ 950.289140] env[62923]: DEBUG nova.compute.manager [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Stashing vm_state: active {{(pid=62923) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 950.300347] env[62923]: DEBUG nova.scheduler.client.report [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 950.355775] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "60805eeb-8287-4064-9bd3-a7c6a21f40b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.356058] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "60805eeb-8287-4064-9bd3-a7c6a21f40b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.356263] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "60805eeb-8287-4064-9bd3-a7c6a21f40b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.356442] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "60805eeb-8287-4064-9bd3-a7c6a21f40b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.356629] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "60805eeb-8287-4064-9bd3-a7c6a21f40b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.358757] env[62923]: INFO nova.compute.manager [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] 
Terminating instance [ 950.361289] env[62923]: DEBUG nova.compute.manager [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 950.361289] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 950.361493] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79cd635e-0e70-4ec4-8ae0-03357615d6d8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.369139] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 950.369386] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9e8adf3-ca64-44ae-a3a1-6b490f0bd6a8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.375998] env[62923]: DEBUG oslo_vmware.api [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 950.375998] env[62923]: value = "task-1370231" [ 950.375998] env[62923]: _type = "Task" [ 950.375998] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.384320] env[62923]: DEBUG oslo_vmware.api [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370231, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.491036] env[62923]: DEBUG nova.network.neutron [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updating instance_info_cache with network_info: [{"id": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "address": "fa:16:3e:bf:9d:f5", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3adfc18d-e4", "ovs_interfaceid": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1353213d-e2e0-4537-a849-37be48c686ac", "address": "fa:16:3e:f6:2a:9f", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1353213d-e2", "ovs_interfaceid": "1353213d-e2e0-4537-a849-37be48c686ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.501548] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159116} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.501879] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 950.502092] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 950.502260] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 950.526135] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370230, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.707370] env[62923]: DEBUG nova.compute.manager [req-1de7790f-5f08-468e-9de4-f15f9d9f4ff7 req-609b9f4b-3305-4bfc-8b67-dd1c0176897e service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Received event network-changed-1353213d-e2e0-4537-a849-37be48c686ac {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 950.707600] env[62923]: DEBUG nova.compute.manager [req-1de7790f-5f08-468e-9de4-f15f9d9f4ff7 req-609b9f4b-3305-4bfc-8b67-dd1c0176897e service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Refreshing instance network info cache due to event network-changed-1353213d-e2e0-4537-a849-37be48c686ac. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 950.707837] env[62923]: DEBUG oslo_concurrency.lockutils [req-1de7790f-5f08-468e-9de4-f15f9d9f4ff7 req-609b9f4b-3305-4bfc-8b67-dd1c0176897e service nova] Acquiring lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.805230] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.263s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.805883] env[62923]: DEBUG nova.compute.manager [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 950.809315] env[62923]: DEBUG oslo_concurrency.lockutils [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.809551] env[62923]: DEBUG oslo_concurrency.lockutils [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.818241] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "906da59a-24ac-4486-a835-62d3f81d3683" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.818473] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "906da59a-24ac-4486-a835-62d3f81d3683" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.886169] env[62923]: DEBUG oslo_vmware.api [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370231, 'name': PowerOffVM_Task, 'duration_secs': 0.219755} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.886453] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.886655] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.886906] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95ff306e-0712-4869-9dc5-3b374bfd6a1b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.977647] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 950.977952] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 950.978167] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleting the datastore file [datastore2] 60805eeb-8287-4064-9bd3-a7c6a21f40b5 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.978434] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-861450e4-db67-42c5-8ce3-18d15ed1e899 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.985013] env[62923]: DEBUG oslo_vmware.api [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 950.985013] env[62923]: value = "task-1370233" [ 950.985013] env[62923]: _type = "Task" [ 950.985013] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.992631] env[62923]: DEBUG oslo_vmware.api [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370233, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.996242] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.996913] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.997107] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.997365] env[62923]: DEBUG oslo_concurrency.lockutils [req-1de7790f-5f08-468e-9de4-f15f9d9f4ff7 req-609b9f4b-3305-4bfc-8b67-dd1c0176897e service nova] Acquired lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.997543] env[62923]: DEBUG nova.network.neutron [req-1de7790f-5f08-468e-9de4-f15f9d9f4ff7 req-609b9f4b-3305-4bfc-8b67-dd1c0176897e service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Refreshing network info cache for port 1353213d-e2e0-4537-a849-37be48c686ac {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 950.999176] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dac655a-c1de-481e-8ccd-54e53ed6ce26 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.019575] env[62923]: DEBUG nova.virt.hardware [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 951.019820] env[62923]: DEBUG nova.virt.hardware [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 951.019980] env[62923]: DEBUG nova.virt.hardware [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Image 
limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.020186] env[62923]: DEBUG nova.virt.hardware [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 951.020335] env[62923]: DEBUG nova.virt.hardware [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.020482] env[62923]: DEBUG nova.virt.hardware [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 951.020683] env[62923]: DEBUG nova.virt.hardware [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 951.020844] env[62923]: DEBUG nova.virt.hardware [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 951.021018] env[62923]: DEBUG nova.virt.hardware [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 951.021182] env[62923]: DEBUG nova.virt.hardware [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 951.021352] env[62923]: DEBUG nova.virt.hardware [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 951.027576] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Reconfiguring VM to attach interface {{(pid=62923) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 951.028610] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8a9b337-b7ed-404f-85d5-ce8f48facecd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.050050] env[62923]: DEBUG oslo_vmware.api [None 
[ 951.050050] env[62923]: DEBUG oslo_vmware.api [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370230, 'name': PowerOnVM_Task, 'duration_secs': 0.588353} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 951.051329] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 951.051561] env[62923]: INFO nova.compute.manager [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Took 10.63 seconds to spawn the instance on the hypervisor.
[ 951.051745] env[62923]: DEBUG nova.compute.manager [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 951.052356] env[62923]: DEBUG oslo_vmware.api [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){
[ 951.052356] env[62923]: value = "task-1370234"
[ 951.052356] env[62923]: _type = "Task"
[ 951.052356] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 951.053041] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3772128-0ace-4cb6-af14-e110e28d36dc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 951.067852] env[62923]: DEBUG oslo_vmware.api [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370234, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 951.312730] env[62923]: DEBUG nova.compute.utils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 951.316125] env[62923]: INFO nova.compute.claims [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 951.319770] env[62923]: DEBUG nova.compute.manager [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 951.320212] env[62923]: DEBUG nova.network.neutron [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 951.322364] env[62923]: DEBUG nova.compute.manager [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 951.374619] env[62923]: DEBUG nova.policy [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e1b761abfd44661a6da62ba35ec442f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2948b6c7e6f04cf98b36777c2fc94fc1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}}
[ 951.494725] env[62923]: DEBUG oslo_vmware.api [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370233, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.277225} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 951.494725] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 951.494847] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 951.494954] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 951.495141] env[62923]: INFO nova.compute.manager [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Took 1.13 seconds to destroy the instance on the hypervisor.
[ 951.495380] env[62923]: DEBUG oslo.service.loopingcall [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 951.495650] env[62923]: DEBUG nova.compute.manager [-] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 951.495723] env[62923]: DEBUG nova.network.neutron [-] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 951.556364] env[62923]: DEBUG nova.virt.hardware [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 951.556730] env[62923]: DEBUG nova.virt.hardware [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 951.556775] env[62923]: DEBUG nova.virt.hardware [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 951.556951] env[62923]: DEBUG nova.virt.hardware [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 951.557087] env[62923]: DEBUG nova.virt.hardware [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 951.557244] env[62923]: DEBUG nova.virt.hardware [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 951.557652] env[62923]: DEBUG nova.virt.hardware [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 951.557856] env[62923]: DEBUG nova.virt.hardware [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 951.558055] env[62923]: DEBUG nova.virt.hardware [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 951.558230] env[62923]: DEBUG nova.virt.hardware [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 951.558426] env[62923]: DEBUG nova.virt.hardware [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 951.559655] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6c01b1-5e31-4f11-aba4-27faac66ef4a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 951.582834] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f4ff8d-fdaf-4f72-a824-414eff207122 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 951.587737] env[62923]: DEBUG oslo_vmware.api [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 951.590628] env[62923]: INFO nova.compute.manager [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Took 16.92 seconds to build instance.
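The "Waiting for the task ... to complete", "progress is N%.", and "completed successfully." records above come from oslo.vmware's task polling. A simplified sketch of such a loop (the real one runs inside a looping call and reads the Task managed object via the property collector; get_task_info here is a hypothetical stand-in for that read):

import time

def wait_for_task(get_task_info, interval=0.5, timeout=300):
    """Poll a task dict like {'state': 'running', 'progress': 14} to completion."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info["state"] == "success":
            return info                      # "completed successfully."
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # mirrors the "progress is N%." debug line emitted on each poll
        print("progress is %s%%." % info.get("progress", 0))
        time.sleep(interval)
    raise TimeoutError("task did not complete in %ss" % timeout)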
[ 951.604076] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:71:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e41070eb-3ac1-4ca9-a3d0-fd65893a97de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1ad0989-e12d-4073-92b4-3a53bf5b8eb2', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 951.611721] env[62923]: DEBUG oslo.service.loopingcall [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 951.612122] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 951.612366] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01271b8a-3454-4e47-b8c4-1a9610252a0c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 951.634476] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 951.634476] env[62923]: value = "task-1370236"
[ 951.634476] env[62923]: _type = "Task"
[ 951.634476] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 951.644018] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370236, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 951.645689] env[62923]: DEBUG nova.network.neutron [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Successfully created port: ee920d2c-b952-40c2-aa5d-be3d494020ee {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 951.820856] env[62923]: DEBUG nova.compute.manager [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 951.824792] env[62923]: INFO nova.compute.resource_tracker [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating resource usage from migration 26465ae3-2214-45b4-8a00-0bc17e0a47ab
[ 951.857352] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 951.973077] env[62923]: DEBUG nova.network.neutron [req-1de7790f-5f08-468e-9de4-f15f9d9f4ff7 req-609b9f4b-3305-4bfc-8b67-dd1c0176897e service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updated VIF entry in instance network info cache for port 1353213d-e2e0-4537-a849-37be48c686ac. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 951.973471] env[62923]: DEBUG nova.network.neutron [req-1de7790f-5f08-468e-9de4-f15f9d9f4ff7 req-609b9f4b-3305-4bfc-8b67-dd1c0176897e service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updating instance_info_cache with network_info: [{"id": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "address": "fa:16:3e:bf:9d:f5", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3adfc18d-e4", "ovs_interfaceid": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1353213d-e2e0-4537-a849-37be48c686ac", "address": "fa:16:3e:f6:2a:9f", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1353213d-e2", "ovs_interfaceid": "1353213d-e2e0-4537-a849-37be48c686ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 952.072030] env[62923]: DEBUG oslo_vmware.api [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 952.072030] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe2a539-4ff5-4f6e-bb41-cf29001b9af0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 952.076746] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995b93dc-09b2-4550-aec6-195b77d1ff48 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 952.112749] env[62923]: DEBUG oslo_concurrency.lockutils [None req-43528109-9f8c-490b-81a6-768ef31ad84a tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "4de1c06d-3261-4447-b5bc-a21a91f7a812" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.450s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 952.116949] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674c1481-1970-4005-b36b-3383d55afff2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 952.131384] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc35d98-a9f2-476d-9caa-1e0ce5f6ff7b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 952.139385] env[62923]: DEBUG nova.compute.manager [req-19aceb5e-2551-46ef-a742-e86d592d489a req-56337761-6229-4006-bd44-64140f79726a service nova] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Received event network-vif-deleted-a126841e-2aec-49ea-b70c-e16e15f30bad {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 952.139659] env[62923]: INFO nova.compute.manager [req-19aceb5e-2551-46ef-a742-e86d592d489a req-56337761-6229-4006-bd44-64140f79726a service nova] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Neutron deleted interface a126841e-2aec-49ea-b70c-e16e15f30bad; detaching it from the instance and deleting it from the info cache
[ 952.139921] env[62923]: DEBUG nova.network.neutron [req-19aceb5e-2551-46ef-a742-e86d592d489a req-56337761-6229-4006-bd44-64140f79726a service nova] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 952.159023] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370236, 'name': CreateVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 952.160563] env[62923]: DEBUG nova.compute.provider_tree [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 952.460261] env[62923]: DEBUG nova.network.neutron [-] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 952.478434] env[62923]: DEBUG oslo_concurrency.lockutils [req-1de7790f-5f08-468e-9de4-f15f9d9f4ff7 req-609b9f4b-3305-4bfc-8b67-dd1c0176897e service nova] Releasing lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 952.569940] env[62923]: DEBUG oslo_vmware.api [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 952.628662] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c965a0cd-c91c-40bd-87ca-cd4848d3d1c2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 952.635178] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c7562afc-81b7-4034-a0a2-9355f0131018 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Suspending the VM {{(pid=62923) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}}
[ 952.635449] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-fb1369c4-4639-4f20-ab31-80b85d8af09c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 952.642196] env[62923]: DEBUG oslo_vmware.api [None req-c7562afc-81b7-4034-a0a2-9355f0131018 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){
[ 952.642196] env[62923]: value = "task-1370237"
[ 952.642196] env[62923]: _type = "Task"
[ 952.642196] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 952.648369] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370236, 'name': CreateVM_Task, 'duration_secs': 0.618202} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 952.648854] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc4861e6-9a7d-49b6-98c9-62c66fcdc458 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 952.650438] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 952.653634] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 952.653778] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 952.654121] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 952.654366] env[62923]: DEBUG oslo_vmware.api [None req-c7562afc-81b7-4034-a0a2-9355f0131018 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370237, 'name': SuspendVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 952.654862] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22b6fc0e-bee2-49ca-9dd4-98d24fc4cb3a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 952.661314] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839f2f68-68a0-4fd9-9a1f-0b36db6c6cba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 952.671335] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 952.671335] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5242d815-6a5c-8816-c522-e3fb589088cb"
[ 952.671335] env[62923]: _type = "Task"
[ 952.671335] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 952.672169] env[62923]: DEBUG nova.scheduler.client.report [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 952.687189] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5242d815-6a5c-8816-c522-e3fb589088cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 952.697713] env[62923]: DEBUG nova.compute.manager [req-19aceb5e-2551-46ef-a742-e86d592d489a req-56337761-6229-4006-bd44-64140f79726a service nova] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Detach interface failed, port_id=a126841e-2aec-49ea-b70c-e16e15f30bad, reason: Instance 60805eeb-8287-4064-9bd3-a7c6a21f40b5 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 952.833776] env[62923]: DEBUG nova.compute.manager [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
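The inventory data reported to placement above is what the scheduler places against; usable capacity per resource class is (total - reserved) * allocation_ratio, which is how this node can offer 192 schedulable VCPUs from 48 physical. Working the logged numbers (a hedged sketch of that arithmetic, not placement's code):

# the inventory dict from the set_inventory_for_provider record above,
# trimmed to the fields the capacity formula uses
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print("%s: %.0f schedulable" % (rc, capacity))
# VCPU: 192 schedulable
# MEMORY_MB: 196078 schedulable
# DISK_GB: 400 schedulable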
[ 952.858587] env[62923]: DEBUG nova.virt.hardware [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 952.858842] env[62923]: DEBUG nova.virt.hardware [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 952.859015] env[62923]: DEBUG nova.virt.hardware [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 952.859215] env[62923]: DEBUG nova.virt.hardware [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 952.859364] env[62923]: DEBUG nova.virt.hardware [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 952.859512] env[62923]: DEBUG nova.virt.hardware [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 952.859717] env[62923]: DEBUG nova.virt.hardware [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 952.859969] env[62923]: DEBUG nova.virt.hardware [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 952.860230] env[62923]: DEBUG nova.virt.hardware [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 952.860441] env[62923]: DEBUG nova.virt.hardware [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 952.860642] env[62923]: DEBUG nova.virt.hardware [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 952.861515] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cdecb3-cf57-40b7-b5d9-90d55ba88f28 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 952.869670] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3329e50c-e02e-447a-9e22-9a68ccfa1f95 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 952.963190] env[62923]: INFO nova.compute.manager [-] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Took 1.47 seconds to deallocate network for instance.
[ 953.071999] env[62923]: DEBUG oslo_vmware.api [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 953.153693] env[62923]: DEBUG oslo_vmware.api [None req-c7562afc-81b7-4034-a0a2-9355f0131018 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370237, 'name': SuspendVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 953.176985] env[62923]: DEBUG oslo_concurrency.lockutils [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.367s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 953.177260] env[62923]: INFO nova.compute.manager [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Migrating
[ 953.188860] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.332s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 953.190518] env[62923]: INFO nova.compute.claims [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 953.201206] env[62923]: DEBUG nova.network.neutron [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Successfully updated port: ee920d2c-b952-40c2-aa5d-be3d494020ee {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 953.208817] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5242d815-6a5c-8816-c522-e3fb589088cb, 'name': SearchDatastore_Task, 'duration_secs': 0.049475} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 953.209218] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 953.209324] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 953.209552] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 953.209881] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 953.210129] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 953.210424] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3848717-bd9e-41a6-bb07-b35132f46f4a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 953.220326] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 953.220548] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
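The mkdir and _create_folder_if_missing records above show cache-directory creation being treated as idempotent: the directory is created, and an already-exists outcome is tolerated rather than raised. A sketch of that create-if-missing idiom, using the local filesystem as a stand-in for the datastore API (illustrative, not Nova's ds_util code):

import os

def create_folder_if_missing(path):
    """Create path, swallowing the case where a prior run already made it."""
    try:
        os.makedirs(path)
        print("Created directory with path %s" % path)
    except FileExistsError:
        print("Folder %s already exists" % path)

create_folder_if_missing("/tmp/devstack-image-cache_base")  # hypothetical local path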
[ 953.221917] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-570e78db-602d-4f6e-b0d3-12d33238a29b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 953.229135] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 953.229135] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52242f56-d2de-bf7c-666c-6082049d70d1"
[ 953.229135] env[62923]: _type = "Task"
[ 953.229135] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 953.240284] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52242f56-d2de-bf7c-666c-6082049d70d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 953.409064] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Volume attach. Driver type: vmdk {{(pid=62923) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}}
[ 953.409254] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291505', 'volume_id': 'f98451ba-8f2b-4010-bb20-e6959423a29c', 'name': 'volume-f98451ba-8f2b-4010-bb20-e6959423a29c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7', 'attached_at': '', 'detached_at': '', 'volume_id': 'f98451ba-8f2b-4010-bb20-e6959423a29c', 'serial': 'f98451ba-8f2b-4010-bb20-e6959423a29c'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}}
[ 953.410175] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8ceecc-8a69-4742-ae1a-b09201620fed {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 953.428188] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0d5de7-93fd-48d0-9721-0a05feead109 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 953.431710] env[62923]: DEBUG nova.compute.manager [req-a2e7030e-fee1-4e51-a10f-6c21efba4ade req-46e904e9-0c8e-48dc-9ad5-5cb725259fe6 service nova] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Received event network-vif-plugged-ee920d2c-b952-40c2-aa5d-be3d494020ee {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 953.432211] env[62923]: DEBUG oslo_concurrency.lockutils [req-a2e7030e-fee1-4e51-a10f-6c21efba4ade req-46e904e9-0c8e-48dc-9ad5-5cb725259fe6 service nova] Acquiring lock "2d7bceb2-450c-4747-bedb-aa9848450ca9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 953.432211] env[62923]: DEBUG oslo_concurrency.lockutils [req-a2e7030e-fee1-4e51-a10f-6c21efba4ade req-46e904e9-0c8e-48dc-9ad5-5cb725259fe6 service nova] Lock "2d7bceb2-450c-4747-bedb-aa9848450ca9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 953.432470] env[62923]: DEBUG oslo_concurrency.lockutils [req-a2e7030e-fee1-4e51-a10f-6c21efba4ade req-46e904e9-0c8e-48dc-9ad5-5cb725259fe6 service nova] Lock "2d7bceb2-450c-4747-bedb-aa9848450ca9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 953.432470] env[62923]: DEBUG nova.compute.manager [req-a2e7030e-fee1-4e51-a10f-6c21efba4ade req-46e904e9-0c8e-48dc-9ad5-5cb725259fe6 service nova] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] No waiting events found dispatching network-vif-plugged-ee920d2c-b952-40c2-aa5d-be3d494020ee {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 953.432645] env[62923]: WARNING nova.compute.manager [req-a2e7030e-fee1-4e51-a10f-6c21efba4ade req-46e904e9-0c8e-48dc-9ad5-5cb725259fe6 service nova] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Received unexpected event network-vif-plugged-ee920d2c-b952-40c2-aa5d-be3d494020ee for instance with vm_state building and task_state spawning.
[ 953.458312] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] volume-f98451ba-8f2b-4010-bb20-e6959423a29c/volume-f98451ba-8f2b-4010-bb20-e6959423a29c.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 953.458312] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c093f8c-ea8f-4bdb-ad5f-1cb05a33832b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 953.472344] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 953.478499] env[62923]: DEBUG oslo_vmware.api [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){
[ 953.478499] env[62923]: value = "task-1370238"
[ 953.478499] env[62923]: _type = "Task"
[ 953.478499] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 953.486285] env[62923]: DEBUG oslo_vmware.api [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370238, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 953.570122] env[62923]: DEBUG oslo_vmware.api [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 953.655876] env[62923]: DEBUG oslo_vmware.api [None req-c7562afc-81b7-4034-a0a2-9355f0131018 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370237, 'name': SuspendVM_Task} progress is 62%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 953.704020] env[62923]: DEBUG oslo_concurrency.lockutils [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 953.704020] env[62923]: DEBUG oslo_concurrency.lockutils [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 953.704020] env[62923]: DEBUG nova.network.neutron [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 953.710197] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "refresh_cache-2d7bceb2-450c-4747-bedb-aa9848450ca9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 953.710357] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "refresh_cache-2d7bceb2-450c-4747-bedb-aa9848450ca9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 953.710491] env[62923]: DEBUG nova.network.neutron [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 953.740977] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52242f56-d2de-bf7c-666c-6082049d70d1, 'name': SearchDatastore_Task, 'duration_secs': 0.01385} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 953.741812] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72b22f6b-5059-40c1-a162-2afac2b996e7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 953.747621] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 953.747621] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e17e56-e93f-8874-739b-c7fd3006ff5e"
[ 953.747621] env[62923]: _type = "Task"
[ 953.747621] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 953.757225] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e17e56-e93f-8874-739b-c7fd3006ff5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 953.988239] env[62923]: DEBUG oslo_vmware.api [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370238, 'name': ReconfigVM_Task, 'duration_secs': 0.424671} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 953.988604] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Reconfigured VM instance instance-00000050 to attach disk [datastore2] volume-f98451ba-8f2b-4010-bb20-e6959423a29c/volume-f98451ba-8f2b-4010-bb20-e6959423a29c.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 953.993588] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3a33111-b8f8-47f8-b87b-5e288b706d84 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 954.008727] env[62923]: DEBUG oslo_vmware.api [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){
[ 954.008727] env[62923]: value = "task-1370239"
[ 954.008727] env[62923]: _type = "Task"
[ 954.008727] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 954.016587] env[62923]: DEBUG oslo_vmware.api [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370239, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
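The volume-attach records above carry a connection_info dict whose 'data' block names the backing volume, and the "attach disk [datastore2] volume-.../volume-....vmdk" line shows the path form in which it is consumed. A sketch of pulling those fields out (illustrative; the path construction follows the logged line, not necessarily Nova's exact code path):

# connection_info as logged at 953.409254 above, trimmed to the used fields
connection_info = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-291505",
        "volume_id": "f98451ba-8f2b-4010-bb20-e6959423a29c",
        "name": "volume-f98451ba-8f2b-4010-bb20-e6959423a29c",
        "access_mode": "rw",
    },
}

data = connection_info["data"]
# hypothetical datastore name; the log shows datastore2 for this attach
vmdk_path = "[datastore2] %s/%s.vmdk" % (data["name"], data["name"])
read_only = data["access_mode"] != "rw"
print(vmdk_path, "read_only=%s" % read_only)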
[ 954.070866] env[62923]: DEBUG oslo_vmware.api [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 954.154520] env[62923]: DEBUG oslo_vmware.api [None req-c7562afc-81b7-4034-a0a2-9355f0131018 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370237, 'name': SuspendVM_Task, 'duration_secs': 1.158562} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 954.154745] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c7562afc-81b7-4034-a0a2-9355f0131018 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Suspended the VM {{(pid=62923) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}}
[ 954.154927] env[62923]: DEBUG nova.compute.manager [None req-c7562afc-81b7-4034-a0a2-9355f0131018 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 954.155709] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edc09f0-1f06-40b4-88dd-8aba768851ee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 954.246494] env[62923]: DEBUG nova.network.neutron [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 954.263908] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e17e56-e93f-8874-739b-c7fd3006ff5e, 'name': SearchDatastore_Task, 'duration_secs': 0.024242} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 954.263908] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 954.263908] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] eaa654f9-023d-4514-930d-6bebd421325a/eaa654f9-023d-4514-930d-6bebd421325a.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 954.263908] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9dcb93f-8eb1-4658-a64c-4f50c602801c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 954.271123] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 954.271123] env[62923]: value = "task-1370240"
[ 954.271123] env[62923]: _type = "Task"
[ 954.271123] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 954.280622] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370240, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 954.387125] env[62923]: DEBUG nova.network.neutron [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Updating instance_info_cache with network_info: [{"id": "ee920d2c-b952-40c2-aa5d-be3d494020ee", "address": "fa:16:3e:2f:d8:b5", "network": {"id": "9ed96510-533e-4ed6-bf9b-e1a401a9df79", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060581969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2948b6c7e6f04cf98b36777c2fc94fc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee920d2c-b9", "ovs_interfaceid": "ee920d2c-b952-40c2-aa5d-be3d494020ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 954.403654] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d962016-9eb6-4cb1-b87e-a730f359a739 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 954.411848] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-619fd42d-2f1a-494a-80d8-2cdb4c1e058a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 954.444886] env[62923]: DEBUG nova.network.neutron [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating instance_info_cache with network_info: [{"id": "422a6526-df54-4c7f-a43c-01c8902e1fb8", "address": "fa:16:3e:3e:65:69", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap422a6526-df", "ovs_interfaceid": "422a6526-df54-4c7f-a43c-01c8902e1fb8", "qbh_params": null, "qbg_params": null,
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.446665] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69354bde-e3ec-49ba-aee0-e1ab4b203514 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.454433] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b033dbd4-140b-4349-9564-8c1d6cb3c0a8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.469376] env[62923]: DEBUG nova.compute.provider_tree [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.521082] env[62923]: DEBUG oslo_vmware.api [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370239, 'name': ReconfigVM_Task, 'duration_secs': 0.213348} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.521324] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291505', 'volume_id': 'f98451ba-8f2b-4010-bb20-e6959423a29c', 'name': 'volume-f98451ba-8f2b-4010-bb20-e6959423a29c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7', 'attached_at': '', 'detached_at': '', 'volume_id': 'f98451ba-8f2b-4010-bb20-e6959423a29c', 'serial': 'f98451ba-8f2b-4010-bb20-e6959423a29c'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 954.571707] env[62923]: DEBUG oslo_vmware.api [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.781855] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370240, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48422} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.782252] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] eaa654f9-023d-4514-930d-6bebd421325a/eaa654f9-023d-4514-930d-6bebd421325a.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 954.782618] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 954.782910] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3a7a90c-3b7c-4de2-b571-19f1e20af5ba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.790235] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 954.790235] env[62923]: value = "task-1370241" [ 954.790235] env[62923]: _type = "Task" [ 954.790235] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.798781] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370241, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.889793] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "refresh_cache-2d7bceb2-450c-4747-bedb-aa9848450ca9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.890090] env[62923]: DEBUG nova.compute.manager [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Instance network_info: |[{"id": "ee920d2c-b952-40c2-aa5d-be3d494020ee", "address": "fa:16:3e:2f:d8:b5", "network": {"id": "9ed96510-533e-4ed6-bf9b-e1a401a9df79", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060581969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2948b6c7e6f04cf98b36777c2fc94fc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee920d2c-b9", "ovs_interfaceid": "ee920d2c-b952-40c2-aa5d-be3d494020ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 954.890547] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:d8:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ba07329-1d3e-4ba8-8774-d029262318c4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ee920d2c-b952-40c2-aa5d-be3d494020ee', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 954.901304] env[62923]: DEBUG oslo.service.loopingcall [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.901598] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 954.901901] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a64a585-ee2d-4dde-929a-ba07a4785826 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.923827] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 954.923827] env[62923]: value = "task-1370242" [ 954.923827] env[62923]: _type = "Task" [ 954.923827] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.932738] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370242, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.950626] env[62923]: DEBUG oslo_concurrency.lockutils [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.974032] env[62923]: DEBUG nova.scheduler.client.report [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 955.071039] env[62923]: DEBUG oslo_vmware.api [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370234, 'name': ReconfigVM_Task, 'duration_secs': 3.711825} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.071604] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.071861] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Reconfigured VM to attach interface {{(pid=62923) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 955.211192] env[62923]: DEBUG oslo_concurrency.lockutils [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "4de1c06d-3261-4447-b5bc-a21a91f7a812" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.211420] env[62923]: DEBUG oslo_concurrency.lockutils [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "4de1c06d-3261-4447-b5bc-a21a91f7a812" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.211647] env[62923]: DEBUG oslo_concurrency.lockutils [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "4de1c06d-3261-4447-b5bc-a21a91f7a812-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.211808] env[62923]: DEBUG oslo_concurrency.lockutils [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "4de1c06d-3261-4447-b5bc-a21a91f7a812-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.212059] env[62923]: DEBUG oslo_concurrency.lockutils [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "4de1c06d-3261-4447-b5bc-a21a91f7a812-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.214201] env[62923]: INFO nova.compute.manager [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Terminating instance [ 955.215992] env[62923]: DEBUG nova.compute.manager [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 
tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 955.216203] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 955.217055] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2082f48c-3332-403f-aeca-c4c0ee22c0ea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.224516] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 955.224775] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3717e53b-12a7-41c1-b99a-5d0585433b9c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.300539] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370241, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142107} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.300808] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 955.301596] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed29a0ad-c693-4ab0-9af9-2421f59bf546 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.326568] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] eaa654f9-023d-4514-930d-6bebd421325a/eaa654f9-023d-4514-930d-6bebd421325a.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 955.327981] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84af5a6c-7832-4d20-906f-475d292d9258 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.341886] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 
4de1c06d-3261-4447-b5bc-a21a91f7a812] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 955.342105] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 955.342291] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleting the datastore file [datastore1] 4de1c06d-3261-4447-b5bc-a21a91f7a812 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 955.342540] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-783cd86b-bbb1-4ec4-b6d1-9ed61e0c2d44 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.350014] env[62923]: DEBUG oslo_vmware.api [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 955.350014] env[62923]: value = "task-1370244" [ 955.350014] env[62923]: _type = "Task" [ 955.350014] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.351660] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 955.351660] env[62923]: value = "task-1370245" [ 955.351660] env[62923]: _type = "Task" [ 955.351660] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.362430] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370245, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.365202] env[62923]: DEBUG oslo_vmware.api [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370244, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.434218] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370242, 'name': CreateVM_Task, 'duration_secs': 0.366544} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.434393] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 955.435064] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.435235] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.435585] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 955.435901] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0910199-e779-4e57-9536-8d17629acd03 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.441512] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 955.441512] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522bfe1e-b13f-66fc-46aa-8d57d97d97c0" [ 955.441512] env[62923]: _type = "Task" [ 955.441512] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.449769] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522bfe1e-b13f-66fc-46aa-8d57d97d97c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.451753] env[62923]: DEBUG nova.compute.manager [req-22338990-b863-4ec2-b1b1-e823fb23607d req-d4317430-6d0b-4047-be23-359f3fd16b5c service nova] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Received event network-changed-ee920d2c-b952-40c2-aa5d-be3d494020ee {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.451972] env[62923]: DEBUG nova.compute.manager [req-22338990-b863-4ec2-b1b1-e823fb23607d req-d4317430-6d0b-4047-be23-359f3fd16b5c service nova] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Refreshing instance network info cache due to event network-changed-ee920d2c-b952-40c2-aa5d-be3d494020ee. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 955.452272] env[62923]: DEBUG oslo_concurrency.lockutils [req-22338990-b863-4ec2-b1b1-e823fb23607d req-d4317430-6d0b-4047-be23-359f3fd16b5c service nova] Acquiring lock "refresh_cache-2d7bceb2-450c-4747-bedb-aa9848450ca9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.452446] env[62923]: DEBUG oslo_concurrency.lockutils [req-22338990-b863-4ec2-b1b1-e823fb23607d req-d4317430-6d0b-4047-be23-359f3fd16b5c service nova] Acquired lock "refresh_cache-2d7bceb2-450c-4747-bedb-aa9848450ca9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.452614] env[62923]: DEBUG nova.network.neutron [req-22338990-b863-4ec2-b1b1-e823fb23607d req-d4317430-6d0b-4047-be23-359f3fd16b5c service nova] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Refreshing network info cache for port ee920d2c-b952-40c2-aa5d-be3d494020ee {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 955.478304] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.289s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.479210] env[62923]: DEBUG nova.compute.manager [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 955.481045] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.009s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.481271] env[62923]: DEBUG nova.objects.instance [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lazy-loading 'resources' on Instance uuid 60805eeb-8287-4064-9bd3-a7c6a21f40b5 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.559588] env[62923]: DEBUG nova.objects.instance [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'flavor' on Instance uuid 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.576674] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a3b49744-897f-42ba-baa6-ccf00c3ac677 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "interface-f52f5912-d6e8-4da5-ac39-65bb065b6555-1353213d-e2e0-4537-a849-37be48c686ac" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 10.040s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.864922] env[62923]: DEBUG oslo_vmware.api [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370244, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195147} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.868007] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 955.868211] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 955.868392] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 955.868570] env[62923]: INFO nova.compute.manager [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Took 0.65 seconds to destroy the instance on the hypervisor. 
[ 955.868819] env[62923]: DEBUG oslo.service.loopingcall [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.869024] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370245, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.869236] env[62923]: DEBUG nova.compute.manager [-] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 955.869331] env[62923]: DEBUG nova.network.neutron [-] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 955.952763] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522bfe1e-b13f-66fc-46aa-8d57d97d97c0, 'name': SearchDatastore_Task, 'duration_secs': 0.00968} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.953121] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.953424] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 955.953687] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.953868] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.954108] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 955.954404] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-984c75e7-6d90-4a76-8703-89dcf287afb2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.963381] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 955.963624] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 955.964685] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81f7519c-4b1c-499b-81fe-47c711ed253f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.971725] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 955.971725] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c2f9d5-a6cf-0eb4-824d-652165e17625" [ 955.971725] env[62923]: _type = "Task" [ 955.971725] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.981509] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c2f9d5-a6cf-0eb4-824d-652165e17625, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.983851] env[62923]: DEBUG nova.compute.utils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 955.989175] env[62923]: DEBUG nova.compute.manager [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 955.989175] env[62923]: DEBUG nova.network.neutron [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 956.070359] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a559afe0-5389-43a2-a8c8-4be0fb37920b tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.271s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.094630] env[62923]: DEBUG nova.policy [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ad76ea94b62472fa3318cbbdb308ebe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d1559d2844647aba922cae8e9d992e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 956.158036] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.159324] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.159324] env[62923]: DEBUG nova.compute.manager [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 956.162890] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-874d604f-4f58-46b8-97bc-bbae5afd3b72 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.173919] env[62923]: DEBUG nova.compute.manager [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62923) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 
956.174587] env[62923]: DEBUG nova.objects.instance [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'flavor' on Instance uuid 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.247963] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45ee0d3-a9e3-4053-9429-ee3220b3f60c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.261393] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21479a1-97a7-4eec-b5df-cb3f16f2c57a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.298353] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e43e48d9-6058-4c7c-95c5-4817ee4737be {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.306568] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c88812-5a7d-4300-b5e3-4de6d3571489 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.320532] env[62923]: DEBUG nova.compute.provider_tree [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.365734] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370245, 'name': ReconfigVM_Task, 'duration_secs': 0.664149} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.366080] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Reconfigured VM instance instance-00000059 to attach disk [datastore2] eaa654f9-023d-4514-930d-6bebd421325a/eaa654f9-023d-4514-930d-6bebd421325a.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 956.366733] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f408b68-04b6-482a-b08d-9bc4d71a7a00 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.374381] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 956.374381] env[62923]: value = "task-1370246" [ 956.374381] env[62923]: _type = "Task" [ 956.374381] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.383303] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370246, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.503623] env[62923]: DEBUG nova.network.neutron [req-22338990-b863-4ec2-b1b1-e823fb23607d req-d4317430-6d0b-4047-be23-359f3fd16b5c service nova] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Updated VIF entry in instance network info cache for port ee920d2c-b952-40c2-aa5d-be3d494020ee. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 956.503623] env[62923]: DEBUG nova.network.neutron [req-22338990-b863-4ec2-b1b1-e823fb23607d req-d4317430-6d0b-4047-be23-359f3fd16b5c service nova] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Updating instance_info_cache with network_info: [{"id": "ee920d2c-b952-40c2-aa5d-be3d494020ee", "address": "fa:16:3e:2f:d8:b5", "network": {"id": "9ed96510-533e-4ed6-bf9b-e1a401a9df79", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060581969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2948b6c7e6f04cf98b36777c2fc94fc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee920d2c-b9", "ovs_interfaceid": "ee920d2c-b952-40c2-aa5d-be3d494020ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.503623] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0894c45b-9899-4454-9d82-7b92fbbeee7a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.504461] env[62923]: DEBUG nova.compute.manager [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 956.507270] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating instance '2a9a93f8-9398-4a19-a149-a1092ceb416d' progress to 0 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 956.517796] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c2f9d5-a6cf-0eb4-824d-652165e17625, 'name': SearchDatastore_Task, 'duration_secs': 0.012034} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.519608] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd31c5ce-bfa0-421b-9169-c76e5bcc3fa0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.526362] env[62923]: DEBUG nova.network.neutron [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Successfully created port: 4d94a3a6-5f65-455c-981e-c9aa13c739d7 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 956.530645] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 956.530645] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52dab08b-37a4-58e1-9f8d-5249a852ec99" [ 956.530645] env[62923]: _type = "Task" [ 956.530645] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.540850] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52dab08b-37a4-58e1-9f8d-5249a852ec99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.646085] env[62923]: DEBUG nova.network.neutron [-] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.686914] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 956.687460] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5398e9d6-8f00-4f18-83be-629e35145d0c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.696964] env[62923]: DEBUG oslo_vmware.api [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 956.696964] env[62923]: value = "task-1370247" [ 956.696964] env[62923]: _type = "Task" [ 956.696964] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.710385] env[62923]: DEBUG oslo_vmware.api [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370247, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.824767] env[62923]: DEBUG nova.scheduler.client.report [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 956.885995] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370246, 'name': Rename_Task} progress is 99%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.916459] env[62923]: DEBUG oslo_concurrency.lockutils [req-22338990-b863-4ec2-b1b1-e823fb23607d req-d4317430-6d0b-4047-be23-359f3fd16b5c service nova] Releasing lock "refresh_cache-2d7bceb2-450c-4747-bedb-aa9848450ca9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.023784] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.023784] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5d13485-9bea-44b8-b2e6-b073254c51f8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.032654] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 957.032654] env[62923]: value = "task-1370248" [ 957.032654] env[62923]: _type = "Task" [ 957.032654] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.045521] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370248, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.049213] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52dab08b-37a4-58e1-9f8d-5249a852ec99, 'name': SearchDatastore_Task, 'duration_secs': 0.015337} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.049497] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.049725] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 2d7bceb2-450c-4747-bedb-aa9848450ca9/2d7bceb2-450c-4747-bedb-aa9848450ca9.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 957.049989] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4c50f9f-b4bf-4438-a6e3-d701ec084765 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.059533] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 957.059533] env[62923]: value = "task-1370249" [ 957.059533] env[62923]: _type = "Task" [ 957.059533] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.076560] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370249, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.150635] env[62923]: INFO nova.compute.manager [-] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Took 1.28 seconds to deallocate network for instance. [ 957.207604] env[62923]: DEBUG oslo_vmware.api [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370247, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.312088] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "interface-f52f5912-d6e8-4da5-ac39-65bb065b6555-1353213d-e2e0-4537-a849-37be48c686ac" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.312088] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "interface-f52f5912-d6e8-4da5-ac39-65bb065b6555-1353213d-e2e0-4537-a849-37be48c686ac" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.329756] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.849s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.358077] env[62923]: INFO nova.scheduler.client.report [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleted allocations for instance 60805eeb-8287-4064-9bd3-a7c6a21f40b5 [ 957.387577] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370246, 'name': Rename_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.475196] env[62923]: DEBUG nova.compute.manager [req-74416369-7b4f-4ba8-91e8-a3607a1756a2 req-b35c90b5-caf3-4abd-aba1-b9eb23283656 service nova] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Received event network-vif-deleted-f0a69691-e627-471b-bf2c-1705c8a4f373 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 957.525381] env[62923]: DEBUG nova.compute.manager [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 957.548693] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370248, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.551484] env[62923]: DEBUG nova.virt.hardware [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 957.551711] env[62923]: DEBUG nova.virt.hardware [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 957.551872] env[62923]: DEBUG nova.virt.hardware [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.552068] env[62923]: DEBUG nova.virt.hardware [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 957.552219] env[62923]: DEBUG nova.virt.hardware [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.552385] env[62923]: DEBUG nova.virt.hardware [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 957.552575] env[62923]: DEBUG nova.virt.hardware [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 957.552792] env[62923]: DEBUG nova.virt.hardware [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 957.552975] env[62923]: DEBUG nova.virt.hardware [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Got 1 
possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 957.553155] env[62923]: DEBUG nova.virt.hardware [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 957.553329] env[62923]: DEBUG nova.virt.hardware [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 957.554190] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082d54be-b8ba-46dc-9d49-f6702f7a6628 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.566597] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cecbfb4-267a-4917-a2e6-79bbd2b530c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.576228] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370249, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477726} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.584057] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 2d7bceb2-450c-4747-bedb-aa9848450ca9/2d7bceb2-450c-4747-bedb-aa9848450ca9.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 957.584429] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 957.584954] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-313f3c7a-4220-4dc4-a100-fec8bef781c3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.592343] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 957.592343] env[62923]: value = "task-1370250" [ 957.592343] env[62923]: _type = "Task" [ 957.592343] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.601092] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370250, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.657987] env[62923]: DEBUG oslo_concurrency.lockutils [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.658388] env[62923]: DEBUG oslo_concurrency.lockutils [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.658456] env[62923]: DEBUG nova.objects.instance [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lazy-loading 'resources' on Instance uuid 4de1c06d-3261-4447-b5bc-a21a91f7a812 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.708774] env[62923]: DEBUG oslo_vmware.api [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370247, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.815368] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.815555] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.816563] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8e675f-8d71-4ac0-8d7f-011395f3d6a7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.835537] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f3268b-076a-4166-bca6-5747c0fca7a4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.861175] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Reconfiguring VM to detach interface {{(pid=62923) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 957.861467] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae1f9670-3cde-4079-ac74-8a6193fa8cb4 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.877233] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b3f94dc9-6e5d-4b47-b344-6a707342ed57 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "60805eeb-8287-4064-9bd3-a7c6a21f40b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.521s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.884033] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 957.884033] env[62923]: value = "task-1370251" [ 957.884033] env[62923]: _type = "Task" [ 957.884033] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.889762] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370246, 'name': Rename_Task, 'duration_secs': 1.206527} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.890304] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 957.891046] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ab3d5d2-c6af-4cd4-9247-38569fec4ad0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.895570] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370251, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.902027] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 957.902027] env[62923]: value = "task-1370252" [ 957.902027] env[62923]: _type = "Task" [ 957.902027] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.912128] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370252, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.048176] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370248, 'name': PowerOffVM_Task, 'duration_secs': 0.687602} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.048585] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 958.048895] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating instance '2a9a93f8-9398-4a19-a149-a1092ceb416d' progress to 17 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 958.102674] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370250, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068594} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.102958] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 958.103743] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956eb956-e66d-4e82-b05d-53e85e01ed0c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.127190] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 2d7bceb2-450c-4747-bedb-aa9848450ca9/2d7bceb2-450c-4747-bedb-aa9848450ca9.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.128739] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a7c98f2-56e2-476f-b645-efa9c7966977 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.143852] env[62923]: DEBUG nova.compute.manager [req-c071bfaf-c00b-49f9-a4b9-ece41e711760 req-2fc1155e-545a-4354-b9fd-a37d9f1b9837 service nova] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Received event network-vif-plugged-4d94a3a6-5f65-455c-981e-c9aa13c739d7 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 958.144088] env[62923]: DEBUG oslo_concurrency.lockutils [req-c071bfaf-c00b-49f9-a4b9-ece41e711760 req-2fc1155e-545a-4354-b9fd-a37d9f1b9837 service nova] Acquiring lock "906da59a-24ac-4486-a835-62d3f81d3683-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.144304] env[62923]: DEBUG oslo_concurrency.lockutils [req-c071bfaf-c00b-49f9-a4b9-ece41e711760 req-2fc1155e-545a-4354-b9fd-a37d9f1b9837 
service nova] Lock "906da59a-24ac-4486-a835-62d3f81d3683-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.144471] env[62923]: DEBUG oslo_concurrency.lockutils [req-c071bfaf-c00b-49f9-a4b9-ece41e711760 req-2fc1155e-545a-4354-b9fd-a37d9f1b9837 service nova] Lock "906da59a-24ac-4486-a835-62d3f81d3683-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.144642] env[62923]: DEBUG nova.compute.manager [req-c071bfaf-c00b-49f9-a4b9-ece41e711760 req-2fc1155e-545a-4354-b9fd-a37d9f1b9837 service nova] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] No waiting events found dispatching network-vif-plugged-4d94a3a6-5f65-455c-981e-c9aa13c739d7 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 958.144831] env[62923]: WARNING nova.compute.manager [req-c071bfaf-c00b-49f9-a4b9-ece41e711760 req-2fc1155e-545a-4354-b9fd-a37d9f1b9837 service nova] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Received unexpected event network-vif-plugged-4d94a3a6-5f65-455c-981e-c9aa13c739d7 for instance with vm_state building and task_state spawning. [ 958.151878] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 958.151878] env[62923]: value = "task-1370253" [ 958.151878] env[62923]: _type = "Task" [ 958.151878] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.163667] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370253, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.210433] env[62923]: DEBUG oslo_vmware.api [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370247, 'name': PowerOffVM_Task, 'duration_secs': 1.037837} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.213115] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 958.213345] env[62923]: DEBUG nova.compute.manager [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 958.214680] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d124657-af7d-48b8-bef7-269e11d238f9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.302144] env[62923]: DEBUG nova.network.neutron [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Successfully updated port: 4d94a3a6-5f65-455c-981e-c9aa13c739d7 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 958.384302] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10e3162-b7c8-405b-8009-7fa88757a91a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.398596] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e116e3c-ec72-4772-8ca9-8eb6d3e8ab83 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.401683] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.434714] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c50d78b-a8ad-4887-a78a-b605bc6c51f1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.437616] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370252, 'name': PowerOnVM_Task, 'duration_secs': 0.464715} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.437926] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 958.438187] env[62923]: DEBUG nova.compute.manager [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 958.439447] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68de18c7-96d2-4609-b7e3-632d48f78a5a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.446374] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cc0f98-3d88-49ad-a35f-cd3c287e8bb7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.465441] env[62923]: DEBUG nova.compute.provider_tree [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.556499] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 958.556732] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 958.556892] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.557100] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 958.557261] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.557412] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 958.557614] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 958.557784] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 958.557950] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 958.558161] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 958.558344] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 958.563315] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f310207-5f3e-4489-8310-4f7dd6dc9ab4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.580556] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 958.580556] env[62923]: value = "task-1370254" [ 958.580556] env[62923]: _type = "Task" [ 958.580556] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.589496] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370254, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.661894] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370253, 'name': ReconfigVM_Task, 'duration_secs': 0.306159} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.662207] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 2d7bceb2-450c-4747-bedb-aa9848450ca9/2d7bceb2-450c-4747-bedb-aa9848450ca9.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.662857] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04ae2c9b-0a27-44e9-a0ee-56a185c8ae54 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.670422] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 958.670422] env[62923]: value = "task-1370255" [ 958.670422] env[62923]: _type = "Task" [ 958.670422] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.679595] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370255, 'name': Rename_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.728488] env[62923]: DEBUG oslo_concurrency.lockutils [None req-51e4de68-9ed8-4094-b669-bbed4a3501d1 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.570s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.805389] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "refresh_cache-906da59a-24ac-4486-a835-62d3f81d3683" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.805389] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "refresh_cache-906da59a-24ac-4486-a835-62d3f81d3683" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.805622] env[62923]: DEBUG nova.network.neutron [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.897817] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.957150] env[62923]: INFO nova.compute.manager [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] bringing vm to original state: 'stopped' [ 958.970308] env[62923]: DEBUG nova.scheduler.client.report [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 959.091710] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370254, 'name': ReconfigVM_Task, 'duration_secs': 0.316374} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.092070] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating instance '2a9a93f8-9398-4a19-a149-a1092ceb416d' progress to 33 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 959.180976] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370255, 'name': Rename_Task, 'duration_secs': 0.200431} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.181368] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 959.181631] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e43e46d-aa1e-471b-a93c-0777c05ce4e8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.189317] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 959.189317] env[62923]: value = "task-1370256" [ 959.189317] env[62923]: _type = "Task" [ 959.189317] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.197686] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370256, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.361738] env[62923]: DEBUG nova.network.neutron [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 959.398104] env[62923]: INFO nova.compute.manager [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Rebuilding instance [ 959.400387] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370251, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.443243] env[62923]: DEBUG nova.compute.manager [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 959.444250] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477c0133-a8c9-4659-bab2-a37d19ee267e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.474467] env[62923]: DEBUG oslo_concurrency.lockutils [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.816s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.505622] env[62923]: INFO nova.scheduler.client.report [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleted allocations for instance 4de1c06d-3261-4447-b5bc-a21a91f7a812 [ 959.598546] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 959.598830] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 959.598928] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 959.599126] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 959.599280] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
959.599430] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 959.599632] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 959.599870] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 959.599945] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 959.600150] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 959.600339] env[62923]: DEBUG nova.virt.hardware [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 959.605528] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Reconfiguring VM instance instance-0000005a to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 959.605837] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75d829e4-99b1-43a4-8a42-a4b0d9d9e530 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.619190] env[62923]: DEBUG nova.network.neutron [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Updating instance_info_cache with network_info: [{"id": "4d94a3a6-5f65-455c-981e-c9aa13c739d7", "address": "fa:16:3e:32:b0:6e", "network": {"id": "2beb4718-469b-47f0-94d1-7bc1c52e79db", "bridge": "br-int", "label": "tempest-ImagesTestJSON-615155455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1559d2844647aba922cae8e9d992e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d94a3a6-5f", "ovs_interfaceid": "4d94a3a6-5f65-455c-981e-c9aa13c739d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.629956] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 959.629956] env[62923]: value = "task-1370257" [ 959.629956] env[62923]: _type = "Task" [ 959.629956] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.641434] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370257, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.699858] env[62923]: DEBUG oslo_vmware.api [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370256, 'name': PowerOnVM_Task, 'duration_secs': 0.477836} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.700210] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 959.700352] env[62923]: INFO nova.compute.manager [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Took 6.87 seconds to spawn the instance on the hypervisor. [ 959.700531] env[62923]: DEBUG nova.compute.manager [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 959.701405] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806d5342-8c4a-45c2-b902-8ae2e6faa1a6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.896909] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370251, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 959.958411] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 959.958721] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f0414af-c438-4986-8677-29c13f7c012b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 959.963116] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "eaa654f9-023d-4514-930d-6bebd421325a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 959.963441] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "eaa654f9-023d-4514-930d-6bebd421325a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 959.963633] env[62923]: DEBUG nova.compute.manager [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 959.964448] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27894851-cac9-4c38-b862-c789649be87e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 959.968074] env[62923]: DEBUG oslo_vmware.api [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Waiting for the task: (returnval){
[ 959.968074] env[62923]: value = "task-1370258"
[ 959.968074] env[62923]: _type = "Task"
[ 959.968074] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 959.972926] env[62923]: DEBUG nova.compute.manager [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62923) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}}
[ 959.975051] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 959.975273] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea517b11-3b65-46bf-8214-2eeabd4cd726 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 959.979262] env[62923]: DEBUG oslo_vmware.api [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370258, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 959.983740] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 959.983740] env[62923]: value = "task-1370259"
[ 959.983740] env[62923]: _type = "Task"
[ 959.983740] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 959.991930] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370259, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 960.011435] env[62923]: DEBUG nova.objects.instance [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'flavor' on Instance uuid 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 960.015660] env[62923]: DEBUG oslo_concurrency.lockutils [None req-01ebafe2-407b-4be8-ba04-67eedb89574f tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "4de1c06d-3261-4447-b5bc-a21a91f7a812" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.804s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 960.123984] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "refresh_cache-906da59a-24ac-4486-a835-62d3f81d3683" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 960.124396] env[62923]: DEBUG nova.compute.manager [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Instance network_info: |[{"id": "4d94a3a6-5f65-455c-981e-c9aa13c739d7", "address": "fa:16:3e:32:b0:6e", "network": {"id": "2beb4718-469b-47f0-94d1-7bc1c52e79db", "bridge": "br-int", "label": "tempest-ImagesTestJSON-615155455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1559d2844647aba922cae8e9d992e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d94a3a6-5f", "ovs_interfaceid": "4d94a3a6-5f65-455c-981e-c9aa13c739d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 960.124952] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:b0:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6eaa481-1f92-4851-b98e-09ed0daad7cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d94a3a6-5f65-455c-981e-c9aa13c739d7', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 960.135526] env[62923]: DEBUG oslo.service.loopingcall [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 960.136043] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 960.140214] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b079ac87-9609-402c-bbe8-9cbf1d412f4e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 960.160357] env[62923]: DEBUG nova.compute.manager [req-bfddcbde-c633-4db6-9abe-d4cd04c96e75 req-24bf0836-ebff-4a1d-8194-0bda9343bb29 service nova] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Received event network-changed-4d94a3a6-5f65-455c-981e-c9aa13c739d7 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 960.160681] env[62923]: DEBUG nova.compute.manager [req-bfddcbde-c633-4db6-9abe-d4cd04c96e75 req-24bf0836-ebff-4a1d-8194-0bda9343bb29 service nova] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Refreshing instance network info cache due to event network-changed-4d94a3a6-5f65-455c-981e-c9aa13c739d7. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 960.161049] env[62923]: DEBUG oslo_concurrency.lockutils [req-bfddcbde-c633-4db6-9abe-d4cd04c96e75 req-24bf0836-ebff-4a1d-8194-0bda9343bb29 service nova] Acquiring lock "refresh_cache-906da59a-24ac-4486-a835-62d3f81d3683" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 960.161312] env[62923]: DEBUG oslo_concurrency.lockutils [req-bfddcbde-c633-4db6-9abe-d4cd04c96e75 req-24bf0836-ebff-4a1d-8194-0bda9343bb29 service nova] Acquired lock "refresh_cache-906da59a-24ac-4486-a835-62d3f81d3683" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 960.161574] env[62923]: DEBUG nova.network.neutron [req-bfddcbde-c633-4db6-9abe-d4cd04c96e75 req-24bf0836-ebff-4a1d-8194-0bda9343bb29 service nova] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Refreshing network info cache for port 4d94a3a6-5f65-455c-981e-c9aa13c739d7 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 960.171344] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370257, 'name': ReconfigVM_Task, 'duration_secs': 0.433074} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 960.174039] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Reconfigured VM instance instance-0000005a to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}}
[ 960.174669] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 960.174669] env[62923]: value = "task-1370260"
[ 960.174669] env[62923]: _type = "Task"
[ 960.174669] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 960.175432] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6abc368-db77-48b3-b862-acda0a593003 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 960.190311] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370260, 'name': CreateVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 960.208952] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 2a9a93f8-9398-4a19-a149-a1092ceb416d/2a9a93f8-9398-4a19-a149-a1092ceb416d.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 960.209558] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-badb32c9-5930-4c1c-bdca-b3c19dd2f825 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 960.231025] env[62923]: INFO nova.compute.manager [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Took 12.01 seconds to build instance.
[ 960.235759] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){
[ 960.235759] env[62923]: value = "task-1370261"
[ 960.235759] env[62923]: _type = "Task"
[ 960.235759] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 960.246762] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370261, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 960.398130] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 960.479218] env[62923]: DEBUG oslo_vmware.api [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370258, 'name': PowerOffVM_Task, 'duration_secs': 0.250183} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 960.479542] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 960.480308] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 960.480587] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e3fb7cf-4e99-4d0e-8e1e-82811ad8c9c2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 960.489129] env[62923]: DEBUG oslo_vmware.api [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Waiting for the task: (returnval){
[ 960.489129] env[62923]: value = "task-1370262"
[ 960.489129] env[62923]: _type = "Task"
[ 960.489129] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 960.497926] env[62923]: DEBUG oslo_vmware.api [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370259, 'name': PowerOffVM_Task, 'duration_secs': 0.200583} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 960.498625] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 960.498859] env[62923]: DEBUG nova.compute.manager [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 960.499652] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7235fc9d-6a13-4ed1-9b0d-e0ed86c2bd4a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 960.506033] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] VM already powered off {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}}
[ 960.506359] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Volume detach. Driver type: vmdk {{(pid=62923) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}}
[ 960.506618] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291480', 'volume_id': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'name': 'volume-25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '65000ac5-1c28-4abe-bc96-c440f0b14d3d', 'attached_at': '', 'detached_at': '', 'volume_id': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'serial': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}}
[ 960.507384] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27441c75-1af2-4063-828f-aefbb5367537 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 960.536505] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 960.536505] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquired lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 960.536505] env[62923]: DEBUG nova.network.neutron [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 960.536505] env[62923]: DEBUG nova.objects.instance [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'info_cache' on Instance uuid 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 960.537272] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78f5dd6-8697-44e4-8abb-b241cc5e3061 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 960.545391] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e97209-3b87-4f04-a340-2e817b5e0cad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 960.566473] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad9cf66-a714-463c-ad89-79f18d42704b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 960.582391] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] The volume has not been displaced from its original location: [datastore2] volume-25dfd9ac-5161-4012-80a4-4fb573e0e4ca/volume-25dfd9ac-5161-4012-80a4-4fb573e0e4ca.vmdk. No consolidation needed. {{(pid=62923) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}}
[ 960.587653] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Reconfiguring VM instance instance-00000054 to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}}
[ 960.587967] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54af68c5-11a5-4ade-81d1-371a9568ec3c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 960.607118] env[62923]: DEBUG oslo_vmware.api [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Waiting for the task: (returnval){
[ 960.607118] env[62923]: value = "task-1370263"
[ 960.607118] env[62923]: _type = "Task"
[ 960.607118] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 960.616152] env[62923]: DEBUG oslo_vmware.api [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370263, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 960.690383] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370260, 'name': CreateVM_Task, 'duration_secs': 0.341023} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 960.690799] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 960.691590] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 960.691913] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 960.692342] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 960.693052] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c435ef77-cd5d-460c-b835-952377eecc55 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 960.698311] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){
[ 960.698311] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c30224-4bc2-dfef-d914-0fcc3a06e1ee"
[ 960.698311] env[62923]: _type = "Task"
[ 960.698311] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 960.707558] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c30224-4bc2-dfef-d914-0fcc3a06e1ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 960.732851] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1d03a635-23ac-4342-9614-a6f8edf0c388 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "2d7bceb2-450c-4747-bedb-aa9848450ca9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.522s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 960.747679] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370261, 'name': ReconfigVM_Task, 'duration_secs': 0.334825} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 960.748179] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 2a9a93f8-9398-4a19-a149-a1092ceb416d/2a9a93f8-9398-4a19-a149-a1092ceb416d.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 960.748623] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating instance '2a9a93f8-9398-4a19-a149-a1092ceb416d' progress to 50 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 960.897316] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 961.021639] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "eaa654f9-023d-4514-930d-6bebd421325a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.058s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 961.026187] env[62923]: DEBUG nova.network.neutron [req-bfddcbde-c633-4db6-9abe-d4cd04c96e75 req-24bf0836-ebff-4a1d-8194-0bda9343bb29 service nova] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Updated VIF entry in instance network info cache for port 4d94a3a6-5f65-455c-981e-c9aa13c739d7. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 961.026535] env[62923]: DEBUG nova.network.neutron [req-bfddcbde-c633-4db6-9abe-d4cd04c96e75 req-24bf0836-ebff-4a1d-8194-0bda9343bb29 service nova] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Updating instance_info_cache with network_info: [{"id": "4d94a3a6-5f65-455c-981e-c9aa13c739d7", "address": "fa:16:3e:32:b0:6e", "network": {"id": "2beb4718-469b-47f0-94d1-7bc1c52e79db", "bridge": "br-int", "label": "tempest-ImagesTestJSON-615155455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1559d2844647aba922cae8e9d992e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6eaa481-1f92-4851-b98e-09ed0daad7cb", "external-id": "nsx-vlan-transportzone-636", "segmentation_id": 636, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d94a3a6-5f", "ovs_interfaceid": "4d94a3a6-5f65-455c-981e-c9aa13c739d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 961.041638] env[62923]: DEBUG nova.objects.base [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Object Instance<3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7> lazy-loaded attributes: flavor,info_cache {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}}
[ 961.123811] env[62923]: DEBUG oslo_vmware.api [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370263, 'name': ReconfigVM_Task, 'duration_secs': 0.287952} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 961.124125] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Reconfigured VM instance instance-00000054 to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}}
[ 961.128762] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a25a8ce-d219-43f7-b5bd-1d63009e58d6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.146421] env[62923]: DEBUG oslo_vmware.api [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Waiting for the task: (returnval){
[ 961.146421] env[62923]: value = "task-1370264"
[ 961.146421] env[62923]: _type = "Task"
[ 961.146421] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 961.155623] env[62923]: DEBUG oslo_vmware.api [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370264, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 961.209696] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c30224-4bc2-dfef-d914-0fcc3a06e1ee, 'name': SearchDatastore_Task, 'duration_secs': 0.01251} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 961.210024] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 961.210271] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 961.210511] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 961.210657] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 961.210840] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 961.211116] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9e023f4-1258-4e41-8f70-45c4f385f2dd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.220306] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 961.220590] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 961.221425] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21b8402c-a654-430f-8d23-86904e12c8b1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.227060] env[62923]: DEBUG oslo_concurrency.lockutils [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "2d7bceb2-450c-4747-bedb-aa9848450ca9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 961.227399] env[62923]: DEBUG oslo_concurrency.lockutils [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "2d7bceb2-450c-4747-bedb-aa9848450ca9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 961.227654] env[62923]: DEBUG oslo_concurrency.lockutils [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "2d7bceb2-450c-4747-bedb-aa9848450ca9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 961.227932] env[62923]: DEBUG oslo_concurrency.lockutils [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "2d7bceb2-450c-4747-bedb-aa9848450ca9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 961.228219] env[62923]: DEBUG oslo_concurrency.lockutils [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "2d7bceb2-450c-4747-bedb-aa9848450ca9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 961.231055] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){
[ 961.231055] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]529c2c1a-2585-14fb-f0f3-5339ce62cbc8"
[ 961.231055] env[62923]: _type = "Task"
[ 961.231055] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 961.233867] env[62923]: INFO nova.compute.manager [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Terminating instance
[ 961.236176] env[62923]: DEBUG nova.compute.manager [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 961.236461] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 961.237318] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8be687-95b5-4529-b0d3-5f5ba4b977b2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.243846] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]529c2c1a-2585-14fb-f0f3-5339ce62cbc8, 'name': SearchDatastore_Task, 'duration_secs': 0.009413} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 961.245426] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36b1fcb0-96bf-4aa8-a0e5-232494ad5ee2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.249874] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 961.250540] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70117e1d-7722-4c66-b7a8-94e56ddaab2e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.255160] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){
[ 961.255160] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f3a5a9-7f49-ab4c-b97c-eb3bccf953f9"
[ 961.255160] env[62923]: _type = "Task"
[ 961.255160] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 961.256098] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be138da-6e4e-417b-8095-f174ec6df31d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.263457] env[62923]: DEBUG oslo_vmware.api [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){
[ 961.263457] env[62923]: value = "task-1370265"
[ 961.263457] env[62923]: _type = "Task"
[ 961.263457] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 961.287096] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f3a5a9-7f49-ab4c-b97c-eb3bccf953f9, 'name': SearchDatastore_Task, 'duration_secs': 0.010174} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 961.287959] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 961.288288] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 906da59a-24ac-4486-a835-62d3f81d3683/906da59a-24ac-4486-a835-62d3f81d3683.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 961.289141] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c590b5-c781-4bd0-b9ea-06a5b57d3f86 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.295230] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-418e3906-1967-48ed-9498-7eb5b1290729 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.298938] env[62923]: DEBUG oslo_vmware.api [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370265, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 961.317980] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating instance '2a9a93f8-9398-4a19-a149-a1092ceb416d' progress to 67 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 961.326022] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){
[ 961.326022] env[62923]: value = "task-1370266"
[ 961.326022] env[62923]: _type = "Task"
[ 961.326022] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 961.333299] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370266, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 961.397988] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 961.529949] env[62923]: DEBUG oslo_concurrency.lockutils [req-bfddcbde-c633-4db6-9abe-d4cd04c96e75 req-24bf0836-ebff-4a1d-8194-0bda9343bb29 service nova] Releasing lock "refresh_cache-906da59a-24ac-4486-a835-62d3f81d3683" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 961.530883] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 961.530883] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 961.531040] env[62923]: DEBUG nova.objects.instance [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62923) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}}
[ 961.661452] env[62923]: DEBUG oslo_vmware.api [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370264, 'name': ReconfigVM_Task, 'duration_secs': 0.19759} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 961.661981] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291480', 'volume_id': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'name': 'volume-25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '65000ac5-1c28-4abe-bc96-c440f0b14d3d', 'attached_at': '', 'detached_at': '', 'volume_id': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca', 'serial': '25dfd9ac-5161-4012-80a4-4fb573e0e4ca'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}}
[ 961.662275] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 961.663361] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96fff87-ef6e-4645-9c01-11dbbaebef9b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.673726] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 961.674110] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81dde817-5d91-47d0-a224-5f525dcc0111 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.750205] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 961.750590] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 961.750641] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Deleting the datastore file [datastore2] 65000ac5-1c28-4abe-bc96-c440f0b14d3d {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 961.750882] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afaf27c1-8041-4424-928a-ee3a1089d982 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.759737] env[62923]: DEBUG oslo_vmware.api [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Waiting for the task: (returnval){
[ 961.759737] env[62923]: value = "task-1370268"
[ 961.759737] env[62923]: _type = "Task"
[ 961.759737] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 961.770518] env[62923]: DEBUG oslo_vmware.api [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370268, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 961.775755] env[62923]: DEBUG oslo_vmware.api [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370265, 'name': PowerOffVM_Task, 'duration_secs': 0.184552} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 961.776147] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 961.776442] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 961.776802] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c2026bd-0728-49c2-95fc-62334de6debb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.841983] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370266, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458088} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 961.842789] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 906da59a-24ac-4486-a835-62d3f81d3683/906da59a-24ac-4486-a835-62d3f81d3683.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 961.842789] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 961.843798] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41226ad3-1247-45ee-9f1e-e0ee995dc616 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.846486] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 961.846704] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 961.846906] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleting the datastore file [datastore2] 2d7bceb2-450c-4747-bedb-aa9848450ca9 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 961.847793] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-90ef8be5-dd78-49a2-9890-5c4df3c80b98 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 961.852287] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){
[ 961.852287] env[62923]: value = "task-1370270"
[ 961.852287] env[62923]: _type = "Task"
[ 961.852287] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 961.856787] env[62923]: DEBUG oslo_vmware.api [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){
[ 961.856787] env[62923]: value = "task-1370271"
[ 961.856787] env[62923]: _type = "Task"
[ 961.856787] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 961.863069] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370270, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 961.871057] env[62923]: DEBUG oslo_vmware.api [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370271, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 961.899011] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 961.919944] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "92a10f0a-4bfd-405a-956e-3ea29a740b28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 961.920205] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "92a10f0a-4bfd-405a-956e-3ea29a740b28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 961.963693] env[62923]: DEBUG nova.network.neutron [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Port 422a6526-df54-4c7f-a43c-01c8902e1fb8 binding to destination host cpu-1 is already ACTIVE {{(pid=62923) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}}
[ 961.978197] env[62923]: DEBUG nova.network.neutron [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Updating instance_info_cache with network_info: [{"id": "70dafc2e-d2a9-49fa-ac00-d46b002927bf", "address": "fa:16:3e:79:27:67", "network": {"id": "f9845a8d-f3e3-4080-8b11-bca02678b9c5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1137339094-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a855374ba4624ee78230d07b85b2ab8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70dafc2e-d2", "ovs_interfaceid": "70dafc2e-d2a9-49fa-ac00-d46b002927bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 962.269432] env[62923]: DEBUG oslo_vmware.api [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Task: {'id': task-1370268, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1047} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 962.269811] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 962.270182] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 962.270402] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 962.329518] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Volume detach. Driver type: vmdk {{(pid=62923) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}}
[ 962.329900] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e684886c-d182-47ff-bd61-e0ed44b9f87b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 962.340538] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc368fb-b5a3-412e-8ed8-1998fb3cce3a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 962.364222] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370270, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067694} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 962.373814] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 962.375366] env[62923]: ERROR nova.compute.manager [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Failed to detach volume 25dfd9ac-5161-4012-80a4-4fb573e0e4ca from /dev/sda: nova.exception.InstanceNotFound: Instance 65000ac5-1c28-4abe-bc96-c440f0b14d3d could not be found.
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Traceback (most recent call last):
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] self.driver.rebuild(**kwargs)
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] raise NotImplementedError()
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] NotImplementedError
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d]
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] During handling of the above exception, another exception occurred:
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d]
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Traceback (most recent call last):
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] self.driver.detach_volume(context, old_connection_info,
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] return self._volumeops.detach_volume(connection_info, instance)
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] self._detach_volume_vmdk(connection_info, instance)
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk
[ 962.375366] env[62923]: ERROR nova.compute.manager [instance:
65000ac5-1c28-4abe-bc96-c440f0b14d3d] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] stable_ref.fetch_moref(session) [ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] raise exception.InstanceNotFound(instance_id=self._uuid) [ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] nova.exception.InstanceNotFound: Instance 65000ac5-1c28-4abe-bc96-c440f0b14d3d could not be found. [ 962.375366] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] [ 962.380942] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cb647a-4a88-4597-b779-6bc92041b26f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.383375] env[62923]: DEBUG oslo_vmware.api [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370271, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.381699} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.383649] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 962.383835] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 962.384035] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 962.384198] env[62923]: INFO nova.compute.manager [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Took 1.15 seconds to destroy the instance on the hypervisor. [ 962.384435] env[62923]: DEBUG oslo.service.loopingcall [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 962.384975] env[62923]: DEBUG nova.compute.manager [-] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 962.385082] env[62923]: DEBUG nova.network.neutron [-] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 962.407340] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 906da59a-24ac-4486-a835-62d3f81d3683/906da59a-24ac-4486-a835-62d3f81d3683.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.413053] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3d1beba-c553-4da4-afda-0a0162f206f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.428250] env[62923]: DEBUG nova.compute.manager [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 962.444642] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.444642] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 962.444642] env[62923]: value = "task-1370272" [ 962.444642] env[62923]: _type = "Task" [ 962.444642] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.451788] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370272, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.480878] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Releasing lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.544269] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dbb5d127-24e7-423d-a506-03415307b89f tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.568457] env[62923]: DEBUG nova.compute.utils [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Build of instance 65000ac5-1c28-4abe-bc96-c440f0b14d3d aborted: Failed to rebuild volume backed instance. {{(pid=62923) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 962.570878] env[62923]: ERROR nova.compute.manager [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 65000ac5-1c28-4abe-bc96-c440f0b14d3d aborted: Failed to rebuild volume backed instance. 
[ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Traceback (most recent call last): [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] self.driver.rebuild(**kwargs) [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] raise NotImplementedError() [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] NotImplementedError [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] During handling of the above exception, another exception occurred: [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Traceback (most recent call last): [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/compute/manager.py", line 3600, in _rebuild_volume_backed_instance [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] self._detach_root_volume(context, instance, root_bdm) [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/compute/manager.py", line 3579, in _detach_root_volume [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] with excutils.save_and_reraise_exception(): [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] self.force_reraise() [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] raise self.value [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] self.driver.detach_volume(context, old_connection_info, [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] return self._volumeops.detach_volume(connection_info, instance) [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] self._detach_volume_vmdk(connection_info, instance) [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] stable_ref.fetch_moref(session) [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] raise exception.InstanceNotFound(instance_id=self._uuid) [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] nova.exception.InstanceNotFound: Instance 65000ac5-1c28-4abe-bc96-c440f0b14d3d could not be found. [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] During handling of the above exception, another exception occurred: [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Traceback (most recent call last): [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/compute/manager.py", line 10865, in _error_out_instance_on_exception [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] yield [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/compute/manager.py", line 3868, in rebuild_instance [ 962.570878] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] self._do_rebuild_instance_with_claim( [ 962.572252] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/compute/manager.py", line 3954, in _do_rebuild_instance_with_claim [ 962.572252] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] self._do_rebuild_instance( [ 962.572252] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/compute/manager.py", line 4146, in _do_rebuild_instance [ 962.572252] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] self._rebuild_default_impl(**kwargs) [ 962.572252] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/compute/manager.py", line 3723, in _rebuild_default_impl [ 962.572252] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] self._rebuild_volume_backed_instance( [ 
962.572252] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] File "/opt/stack/nova/nova/compute/manager.py", line 3615, in _rebuild_volume_backed_instance [ 962.572252] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] raise exception.BuildAbortException( [ 962.572252] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] nova.exception.BuildAbortException: Build of instance 65000ac5-1c28-4abe-bc96-c440f0b14d3d aborted: Failed to rebuild volume backed instance. [ 962.572252] env[62923]: ERROR nova.compute.manager [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] [ 962.825906] env[62923]: DEBUG nova.compute.manager [req-54d9f413-9a74-455d-8e6c-dbc670fef36d req-c3dd0937-ca1f-45a9-97f2-de0aef9d9ebb service nova] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Received event network-vif-deleted-ee920d2c-b952-40c2-aa5d-be3d494020ee {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.825906] env[62923]: INFO nova.compute.manager [req-54d9f413-9a74-455d-8e6c-dbc670fef36d req-c3dd0937-ca1f-45a9-97f2-de0aef9d9ebb service nova] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Neutron deleted interface ee920d2c-b952-40c2-aa5d-be3d494020ee; detaching it from the instance and deleting it from the info cache [ 962.825906] env[62923]: DEBUG nova.network.neutron [req-54d9f413-9a74-455d-8e6c-dbc670fef36d req-c3dd0937-ca1f-45a9-97f2-de0aef9d9ebb service nova] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.911339] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370251, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.936195] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "eaa654f9-023d-4514-930d-6bebd421325a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.936509] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "eaa654f9-023d-4514-930d-6bebd421325a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.936693] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "eaa654f9-023d-4514-930d-6bebd421325a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.936945] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "eaa654f9-023d-4514-930d-6bebd421325a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.937175] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "eaa654f9-023d-4514-930d-6bebd421325a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.943887] env[62923]: INFO nova.compute.manager [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Terminating instance [ 962.946015] env[62923]: DEBUG nova.compute.manager [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Start destroying the instance on the hypervisor.
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 962.946221] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 962.947200] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f7648d-3a38-45cb-b178-a7339f3dc4bc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.959434] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370272, 'name': ReconfigVM_Task, 'duration_secs': 0.274034} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.960325] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.960559] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.962011] env[62923]: INFO nova.compute.claims [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 962.967554] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 906da59a-24ac-4486-a835-62d3f81d3683/906da59a-24ac-4486-a835-62d3f81d3683.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.968433] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 962.969023] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-baaa256c-c9ec-4350-9740-1fb6bad12eb8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.970449] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13a3de31-ef9b-4745-b310-0568dcf54419 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.988042] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 962.988334] env[62923]: DEBUG oslo_concurrency.lockutils [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "2a9a93f8-9398-4a19-a149-a1092ceb416d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.988623] env[62923]: DEBUG oslo_concurrency.lockutils [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "2a9a93f8-9398-4a19-a149-a1092ceb416d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.988813] env[62923]: DEBUG oslo_concurrency.lockutils [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "2a9a93f8-9398-4a19-a149-a1092ceb416d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.990761] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48fec1c2-7c6f-4bf7-b00f-85d3036f29c4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.993313] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 962.993313] env[62923]: value = "task-1370273" [ 962.993313] env[62923]: _type = "Task" [ 962.993313] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.003974] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370273, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.005505] env[62923]: DEBUG oslo_vmware.api [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 963.005505] env[62923]: value = "task-1370275" [ 963.005505] env[62923]: _type = "Task" [ 963.005505] env[62923]: } to complete.
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.015299] env[62923]: DEBUG oslo_vmware.api [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370275, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.073092] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 963.073349] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 963.073686] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleting the datastore file [datastore2] eaa654f9-023d-4514-930d-6bebd421325a {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.076079] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72193568-a625-4ee1-854c-701af21089d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.085777] env[62923]: DEBUG oslo_vmware.api [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 963.085777] env[62923]: value = "task-1370276" [ 963.085777] env[62923]: _type = "Task" [ 963.085777] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.097426] env[62923]: DEBUG oslo_vmware.api [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370276, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.241971] env[62923]: DEBUG nova.network.neutron [-] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.330728] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2399492c-a815-4804-9282-093daf149243 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.343775] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa8648b-9b85-4871-8884-641c2fc5cc8a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.377593] env[62923]: DEBUG nova.compute.manager [req-54d9f413-9a74-455d-8e6c-dbc670fef36d req-c3dd0937-ca1f-45a9-97f2-de0aef9d9ebb service nova] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Detach interface failed, port_id=ee920d2c-b952-40c2-aa5d-be3d494020ee, reason: Instance 2d7bceb2-450c-4747-bedb-aa9848450ca9 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 963.411311] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370251, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.503941] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370273, 'name': Rename_Task, 'duration_secs': 0.204664} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.504183] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 963.504377] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a894329-c2bd-45d7-ba67-da862a3c84db {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.513395] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 963.513395] env[62923]: value = "task-1370277" [ 963.513395] env[62923]: _type = "Task" [ 963.513395] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.517030] env[62923]: DEBUG oslo_vmware.api [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370275, 'name': PowerOnVM_Task, 'duration_secs': 0.44941} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.521236] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 963.521458] env[62923]: DEBUG nova.compute.manager [None req-5cfb2b74-751f-429d-9647-1ea3b1f445af tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 963.522676] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4db9b1-737f-41c2-a934-66d0156a11a1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.531169] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370277, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.594347] env[62923]: DEBUG oslo_vmware.api [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370276, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141703} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.594772] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.594772] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 963.594917] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 963.595098] env[62923]: INFO nova.compute.manager [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Took 0.65 seconds to destroy the instance on the hypervisor. [ 963.595351] env[62923]: DEBUG oslo.service.loopingcall [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.595550] env[62923]: DEBUG nova.compute.manager [-] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 963.595643] env[62923]: DEBUG nova.network.neutron [-] [instance: eaa654f9-023d-4514-930d-6bebd421325a] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 963.744791] env[62923]: INFO nova.compute.manager [-] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Took 1.36 seconds to deallocate network for instance. [ 963.880259] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "1b155391-37d9-4186-b70d-84f2dec5af82" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.880604] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1b155391-37d9-4186-b70d-84f2dec5af82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.912077] env[62923]: DEBUG oslo_vmware.api [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370251, 'name': ReconfigVM_Task, 'duration_secs': 5.757688} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.912612] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.912832] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Reconfigured VM to detach interface {{(pid=62923) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 964.026303] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370277, 'name': PowerOnVM_Task} progress is 66%.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.030547] env[62923]: DEBUG oslo_concurrency.lockutils [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.030772] env[62923]: DEBUG oslo_concurrency.lockutils [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.031016] env[62923]: DEBUG nova.network.neutron [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 964.169770] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b886518d-556c-474b-8794-dee70c6969f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.180607] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fcee9d-b658-44fa-885e-3f6b79fbceae {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.219842] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757d4fb8-f92f-4a45-a6bd-a7cf07042ff4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.228088] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b6fff8-d689-48c9-b825-21896d98f936 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.243290] env[62923]: DEBUG nova.compute.provider_tree [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.253953] env[62923]: DEBUG oslo_concurrency.lockutils [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.344265] env[62923]: DEBUG nova.network.neutron [-] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.383714] env[62923]: DEBUG nova.compute.manager [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 
1b155391-37d9-4186-b70d-84f2dec5af82] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 964.527317] env[62923]: DEBUG oslo_vmware.api [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370277, 'name': PowerOnVM_Task, 'duration_secs': 0.93606} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.527703] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 964.527985] env[62923]: INFO nova.compute.manager [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Took 7.00 seconds to spawn the instance on the hypervisor. [ 964.528336] env[62923]: DEBUG nova.compute.manager [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 964.529469] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b57c88-5ee8-4f27-bfac-a9b3712a3aa9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.587639] env[62923]: DEBUG oslo_concurrency.lockutils [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.747624] env[62923]: DEBUG nova.scheduler.client.report [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 964.788885] env[62923]: DEBUG nova.network.neutron [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating instance_info_cache with network_info: [{"id": "422a6526-df54-4c7f-a43c-01c8902e1fb8", "address": "fa:16:3e:3e:65:69", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap422a6526-df", "ovs_interfaceid": "422a6526-df54-4c7f-a43c-01c8902e1fb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.848020] env[62923]: DEBUG nova.compute.manager [req-8157b707-610c-4745-a38c-e40f166685b0 req-025bd87b-204a-4624-99fa-158663209d32 service nova] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Received event network-vif-deleted-f1ad0989-e12d-4073-92b4-3a53bf5b8eb2 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 964.848020] env[62923]: INFO nova.compute.manager [-] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Took 1.25 seconds to deallocate network for instance. [ 964.904742] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.054854] env[62923]: INFO nova.compute.manager [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Took 13.23 seconds to build instance. 
[ 965.061566] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "f52f5912-d6e8-4da5-ac39-65bb065b6555" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.061566] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "f52f5912-d6e8-4da5-ac39-65bb065b6555" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.061709] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "f52f5912-d6e8-4da5-ac39-65bb065b6555-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.061876] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "f52f5912-d6e8-4da5-ac39-65bb065b6555-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.062494] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "f52f5912-d6e8-4da5-ac39-65bb065b6555-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.063744] env[62923]: INFO nova.compute.manager [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Terminating instance [ 965.065380] env[62923]: DEBUG nova.compute.manager [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Start destroying the instance on the hypervisor.
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 965.065570] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.066528] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2232f0bb-14a6-4219-b486-fdedbe242a6a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.076143] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.076413] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e131f23-e4d1-4906-b66e-6ee8646929d8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.083918] env[62923]: DEBUG oslo_vmware.api [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 965.083918] env[62923]: value = "task-1370278" [ 965.083918] env[62923]: _type = "Task" [ 965.083918] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.092699] env[62923]: DEBUG oslo_vmware.api [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370278, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.257191] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.296s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.257778] env[62923]: DEBUG nova.compute.manager [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 965.260729] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.260960] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquired lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.261212] env[62923]: DEBUG nova.network.neutron [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 965.262577] env[62923]: DEBUG oslo_concurrency.lockutils [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.009s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.263306] env[62923]: DEBUG nova.objects.instance [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lazy-loading 'resources' on Instance uuid 2d7bceb2-450c-4747-bedb-aa9848450ca9 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.291525] env[62923]: DEBUG oslo_concurrency.lockutils [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.353944] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.557504] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c655aaed-8e62-4eab-bb57-3420446234dd tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "906da59a-24ac-4486-a835-62d3f81d3683" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.739s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.595165] env[62923]: DEBUG oslo_vmware.api [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370278, 'name': PowerOffVM_Task, 'duration_secs': 0.193216} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.595462] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.595637] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.595923] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da5ecd7f-06b7-488b-8e0c-527fd38ea737 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.658766] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Acquiring lock "65000ac5-1c28-4abe-bc96-c440f0b14d3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.658928] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Lock "65000ac5-1c28-4abe-bc96-c440f0b14d3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.659163] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Acquiring lock "65000ac5-1c28-4abe-bc96-c440f0b14d3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.659352] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Lock "65000ac5-1c28-4abe-bc96-c440f0b14d3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.659507] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Lock "65000ac5-1c28-4abe-bc96-c440f0b14d3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.661873] env[62923]: INFO nova.compute.manager [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 
tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Terminating instance [ 965.663876] env[62923]: DEBUG nova.compute.manager [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 965.664458] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8147b1e0-469c-44d4-9a85-b91c5565d923 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.675227] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dcff04a-7dc5-4c1c-92a4-3e771913807c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.711146] env[62923]: WARNING nova.virt.vmwareapi.driver [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Instance does not exist. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 65000ac5-1c28-4abe-bc96-c440f0b14d3d could not be found. [ 965.711337] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.711609] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f65ef7d2-ea95-4007-9efe-0d8b9b862e92 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.721238] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231c1a6b-a8fc-4342-9f27-a7e491c9fd3e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.737465] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.738193] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.738535] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Deleting the datastore file [datastore1] f52f5912-d6e8-4da5-ac39-65bb065b6555 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.738902]
env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-648007f2-d69b-49e5-adee-a95a78a642de {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.746605] env[62923]: DEBUG oslo_vmware.api [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 965.746605] env[62923]: value = "task-1370280" [ 965.746605] env[62923]: _type = "Task" [ 965.746605] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.758451] env[62923]: WARNING nova.virt.vmwareapi.vmops [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 65000ac5-1c28-4abe-bc96-c440f0b14d3d could not be found. [ 965.758702] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 965.759055] env[62923]: INFO nova.compute.manager [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Took 0.09 seconds to destroy the instance on the hypervisor. [ 965.759247] env[62923]: DEBUG oslo.service.loopingcall [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.760938] env[62923]: DEBUG nova.compute.manager [-] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 965.761054] env[62923]: DEBUG nova.network.neutron [-] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 965.771686] env[62923]: DEBUG nova.compute.utils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 965.778128] env[62923]: DEBUG oslo_vmware.api [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370280, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.781412] env[62923]: DEBUG nova.compute.manager [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 965.781566] env[62923]: DEBUG nova.network.neutron [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 965.827114] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c971ac00-0be4-4992-8a9d-bd7ccb10c39b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.849958] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998426cd-b970-480f-9e38-2372eb2b84d1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.854178] env[62923]: DEBUG nova.policy [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c654b8365f5543f3bf713f3f5aa00654', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a654d46357ed49cd95460a56926f102a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 965.865257] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating instance '2a9a93f8-9398-4a19-a149-a1092ceb416d' progress to 83 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 966.032699] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751d45eb-abe4-4558-9476-1695bb956f5f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.041399] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d11534-c57b-40be-8dad-64e0e630fa17 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.073980] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd39260-3f3f-44e0-9414-7a7d08693479 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.082542] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da95011-605b-4774-9f62-b04515515326 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.100815] env[62923]: DEBUG nova.compute.provider_tree [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.152412] env[62923]: DEBUG nova.network.neutron [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Successfully created port: 990e9014-0a5e-465f-8306-404937c589e0 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 966.258203] env[62923]: DEBUG oslo_vmware.api [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370280, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198594} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.258513] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.258713] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.258980] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.259191] env[62923]: INFO nova.compute.manager [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Took 1.19 seconds to destroy the instance on the hypervisor. [ 966.259458] env[62923]: DEBUG oslo.service.loopingcall [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.259802] env[62923]: DEBUG nova.compute.manager [-] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 966.259913] env[62923]: DEBUG nova.network.neutron [-] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.279479] env[62923]: DEBUG nova.compute.manager [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 966.285481] env[62923]: DEBUG nova.compute.manager [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 966.286445] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09a4b8b-621d-4b30-a039-fbac5501d121 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.375390] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 966.376323] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0909d81e-5100-4314-996b-9b9c618bea91 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.386808] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 966.386808] env[62923]: value = "task-1370281" [ 966.386808] env[62923]: _type = "Task" [ 966.386808] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.396557] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370281, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.595542] env[62923]: INFO nova.network.neutron [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Port 1353213d-e2e0-4537-a849-37be48c686ac from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
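[editor's note] The recurring "Waiting for the task: ... progress is N%" records above come from oslo.vmware's wait_for_task/_poll_task loop, which polls a vCenter task until it reaches a terminal state. A self-contained sketch of that polling pattern follows; FakeTask and the state names ("running", "success", "error") are assumptions for illustration, not oslo.vmware's actual API.

import time

# Illustrative stand-in for the wait_for_task/_poll_task pattern seen in
# the log: poll a vCenter-style task, report progress, stop on a terminal
# state. FakeTask replays a fixed sequence of (state, progress) pairs.
class FakeTask:
    def __init__(self, steps):
        self._steps = iter(steps)

    def poll(self):
        return next(self._steps)

def wait_for_task(task, interval=0.1):
    while True:
        state, progress = task.poll()
        print(f"Task progress is {progress}%. state={state}")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)  # back off between polls, as the real loop does

wait_for_task(FakeTask([("running", 0), ("running", 66), ("success", 100)]))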
[ 966.595978] env[62923]: DEBUG nova.network.neutron [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updating instance_info_cache with network_info: [{"id": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "address": "fa:16:3e:bf:9d:f5", "network": {"id": "dc0481d3-aa80-48c6-bea8-294b2d1f77ec", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-996225709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0db41047d1004a1d9ca7f663178058da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3adfc18d-e4", "ovs_interfaceid": "3adfc18d-e45f-4eb0-8019-d5531853f63f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.608153] env[62923]: DEBUG nova.scheduler.client.report [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 966.799792] env[62923]: INFO nova.compute.manager [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] instance snapshotting [ 966.805526] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a85479e-fe5c-4c36-a7e8-917d7ec26aa6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.833765] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd99e53a-1a06-459a-ae2f-b7345778513c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.901789] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370281, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.093127] env[62923]: DEBUG nova.compute.manager [req-49ed8718-f70d-4bc0-93e9-9f6d8a53b943 req-25328341-75d9-4a5d-b654-c14caacca221 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Received event network-vif-deleted-3adfc18d-e45f-4eb0-8019-d5531853f63f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 967.093399] env[62923]: INFO nova.compute.manager [req-49ed8718-f70d-4bc0-93e9-9f6d8a53b943 req-25328341-75d9-4a5d-b654-c14caacca221 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Neutron deleted interface 3adfc18d-e45f-4eb0-8019-d5531853f63f; detaching it from the instance and deleting it from the info cache [ 967.093637] env[62923]: DEBUG nova.network.neutron [req-49ed8718-f70d-4bc0-93e9-9f6d8a53b943 req-25328341-75d9-4a5d-b654-c14caacca221 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.102706] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Releasing lock "refresh_cache-f52f5912-d6e8-4da5-ac39-65bb065b6555" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.122878] env[62923]: DEBUG oslo_concurrency.lockutils [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.860s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.125998] env[62923]: DEBUG oslo_concurrency.lockutils [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.538s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.177613] env[62923]: INFO nova.scheduler.client.report [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleted allocations for instance 2d7bceb2-450c-4747-bedb-aa9848450ca9 [ 967.189345] env[62923]: DEBUG nova.compute.manager [req-8fd6435b-a258-4cdd-b51e-f27da4ebe5a4 req-7b3ebd75-28e2-4fa9-918c-27608edb47e8 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Received event network-vif-deleted-9d495caf-4038-4207-8c80-1309086eddfc {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 967.189546] env[62923]: INFO nova.compute.manager [req-8fd6435b-a258-4cdd-b51e-f27da4ebe5a4 req-7b3ebd75-28e2-4fa9-918c-27608edb47e8 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Neutron deleted interface 9d495caf-4038-4207-8c80-1309086eddfc; detaching it from the instance and deleting it from the info cache [ 967.189912] env[62923]: DEBUG nova.network.neutron [req-8fd6435b-a258-4cdd-b51e-f27da4ebe5a4 req-7b3ebd75-28e2-4fa9-918c-27608edb47e8 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Updating instance_info_cache with network_info: [] 
{{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.290222] env[62923]: DEBUG nova.compute.manager [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 967.320508] env[62923]: DEBUG nova.virt.hardware [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 967.320751] env[62923]: DEBUG nova.virt.hardware [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 967.320913] env[62923]: DEBUG nova.virt.hardware [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 967.321134] env[62923]: DEBUG nova.virt.hardware [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 967.321302] env[62923]: DEBUG nova.virt.hardware [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 967.321453] env[62923]: DEBUG nova.virt.hardware [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 967.321660] env[62923]: DEBUG nova.virt.hardware [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
967.321825] env[62923]: DEBUG nova.virt.hardware [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 967.321996] env[62923]: DEBUG nova.virt.hardware [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 967.322184] env[62923]: DEBUG nova.virt.hardware [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 967.322361] env[62923]: DEBUG nova.virt.hardware [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 967.323405] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0dc7c89-63a4-4263-92b7-54efd7494c7a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.335046] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075f80a3-2ad1-4776-94ec-e0ee8b9fa1e7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.351276] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Creating Snapshot of the VM instance {{(pid=62923) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 967.354134] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ee2b425f-eee5-4dd8-909b-59f419ed37ef {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.362267] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 967.362267] env[62923]: value = "task-1370282" [ 967.362267] env[62923]: _type = "Task" [ 967.362267] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.371100] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370282, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.400724] env[62923]: DEBUG oslo_vmware.api [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370281, 'name': PowerOnVM_Task, 'duration_secs': 0.581987} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.402100] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 967.404170] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-13acb31c-56df-4beb-a348-290fa6adb9c7 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating instance '2a9a93f8-9398-4a19-a149-a1092ceb416d' progress to 100 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 967.407982] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8047cb0-5baf-49bd-9e8a-afb5724c014d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.417746] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c133031a-bbe9-4d32-a251-532dffbeac49 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.454229] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9f7c40-4f16-476f-9058-e954e073deca {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.463363] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62644aa0-2c67-4564-876f-04a77c777661 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.478104] env[62923]: DEBUG nova.compute.provider_tree [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.551207] env[62923]: DEBUG nova.network.neutron [-] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.596294] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc2bb0f1-9cec-452d-8f5b-696c23a01fcc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.606700] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8dd860c-8e6f-4116-8e36-e903b495737d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.617883] 
env[62923]: DEBUG oslo_concurrency.lockutils [None req-2b74d1f1-cc50-4a42-9bb7-3647469a12bb tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "interface-f52f5912-d6e8-4da5-ac39-65bb065b6555-1353213d-e2e0-4537-a849-37be48c686ac" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.306s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.639618] env[62923]: DEBUG nova.compute.manager [req-49ed8718-f70d-4bc0-93e9-9f6d8a53b943 req-25328341-75d9-4a5d-b654-c14caacca221 service nova] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Detach interface failed, port_id=3adfc18d-e45f-4eb0-8019-d5531853f63f, reason: Instance f52f5912-d6e8-4da5-ac39-65bb065b6555 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 967.649649] env[62923]: DEBUG nova.network.neutron [-] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.699187] env[62923]: DEBUG oslo_concurrency.lockutils [None req-05f58141-a616-4773-8ebd-ffafaef44be8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "2d7bceb2-450c-4747-bedb-aa9848450ca9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.468s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.701412] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e1a5253-3861-43d0-82e5-3096824636c6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.715392] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f919fb5-5713-4b4d-991a-8da6f6da9762 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.752916] env[62923]: DEBUG nova.compute.manager [req-8fd6435b-a258-4cdd-b51e-f27da4ebe5a4 req-7b3ebd75-28e2-4fa9-918c-27608edb47e8 service nova] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Detach interface failed, port_id=9d495caf-4038-4207-8c80-1309086eddfc, reason: Instance 65000ac5-1c28-4abe-bc96-c440f0b14d3d could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 967.836942] env[62923]: DEBUG nova.network.neutron [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Successfully updated port: 990e9014-0a5e-465f-8306-404937c589e0 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 967.878785] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370282, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.982463] env[62923]: DEBUG nova.scheduler.client.report [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 968.053770] env[62923]: INFO nova.compute.manager [-] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Took 1.79 seconds to deallocate network for instance. [ 968.152279] env[62923]: INFO nova.compute.manager [-] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Took 2.39 seconds to deallocate network for instance. [ 968.337086] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.337232] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.337430] env[62923]: DEBUG nova.network.neutron [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 968.376037] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370282, 'name': CreateSnapshot_Task, 'duration_secs': 0.851384} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.376381] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Created Snapshot of the VM instance {{(pid=62923) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 968.377318] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ce7af5-b246-4943-9227-70185e57d0a8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.487386] env[62923]: DEBUG oslo_concurrency.lockutils [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.361s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.487610] env[62923]: INFO nova.compute.manager [None req-830d45fc-6b10-43f8-9536-3394d644dbb3 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Successfully reverted task state from rebuilding on failure for instance. [ 968.492924] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.588s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.494437] env[62923]: INFO nova.compute.claims [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 968.567439] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.716335] env[62923]: INFO nova.compute.manager [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Took 0.56 seconds to detach 1 volume for instance.
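[editor's note] Each oslo_concurrency.lockutils record above pairs an 'Acquiring/acquired :: waited Ns' line with a 'released :: held Ns' line; that is how the 3.588s wait on "compute_resources" just above was measured. A minimal sketch of that accounting follows, built on a plain threading.Lock; timed_lock is illustrative, not the real lockutils implementation, though the printed format copies the log.

import contextlib
import threading
import time

_locks: dict[str, threading.Lock] = {}

# Illustrative sketch of the lockutils accounting seen in the log: time
# how long a caller waited to acquire a named lock and how long it held it.
@contextlib.contextmanager
def timed_lock(name: str, caller: str):
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    start = time.monotonic()
    with lock:
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - held_start
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.01)  # stand-in for the resource claim work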
[ 968.718828] env[62923]: DEBUG nova.compute.manager [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Deleting volume: 25dfd9ac-5161-4012-80a4-4fb573e0e4ca {{(pid=62923) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 968.849802] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.849927] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.887053] env[62923]: DEBUG nova.network.neutron [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 968.895552] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Creating linked-clone VM from snapshot {{(pid=62923) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 968.896268] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0aafba61-69b3-49a8-87a7-c1bdc8ceb90c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.909072] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 968.909072] env[62923]: value = "task-1370284" [ 968.909072] env[62923]: _type = "Task" [ 968.909072] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.929233] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370284, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.063890] env[62923]: DEBUG nova.network.neutron [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance_info_cache with network_info: [{"id": "990e9014-0a5e-465f-8306-404937c589e0", "address": "fa:16:3e:1f:14:02", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap990e9014-0a", "ovs_interfaceid": "990e9014-0a5e-465f-8306-404937c589e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.128567] env[62923]: DEBUG nova.compute.manager [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Received event network-vif-plugged-990e9014-0a5e-465f-8306-404937c589e0 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 969.128793] env[62923]: DEBUG oslo_concurrency.lockutils [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] Acquiring lock "92a10f0a-4bfd-405a-956e-3ea29a740b28-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.129011] env[62923]: DEBUG oslo_concurrency.lockutils [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] Lock "92a10f0a-4bfd-405a-956e-3ea29a740b28-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.129194] env[62923]: DEBUG oslo_concurrency.lockutils [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] Lock "92a10f0a-4bfd-405a-956e-3ea29a740b28-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.129361] env[62923]: DEBUG nova.compute.manager [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] No waiting events found dispatching network-vif-plugged-990e9014-0a5e-465f-8306-404937c589e0 {{(pid=62923) 
[ 969.129525] env[62923]: WARNING nova.compute.manager [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Received unexpected event network-vif-plugged-990e9014-0a5e-465f-8306-404937c589e0 for instance with vm_state building and task_state spawning. [ 969.129685] env[62923]: DEBUG nova.compute.manager [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Received event network-changed-990e9014-0a5e-465f-8306-404937c589e0 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 969.129839] env[62923]: DEBUG nova.compute.manager [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Refreshing instance network info cache due to event network-changed-990e9014-0a5e-465f-8306-404937c589e0. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 969.130093] env[62923]: DEBUG oslo_concurrency.lockutils [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] Acquiring lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.270803] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.352292] env[62923]: DEBUG nova.compute.manager [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 969.420328] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370284, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.566675] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.567057] env[62923]: DEBUG nova.compute.manager [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Instance network_info: |[{"id": "990e9014-0a5e-465f-8306-404937c589e0", "address": "fa:16:3e:1f:14:02", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap990e9014-0a", "ovs_interfaceid": "990e9014-0a5e-465f-8306-404937c589e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 969.569548] env[62923]: DEBUG oslo_concurrency.lockutils [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] Acquired lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.569741] env[62923]: DEBUG nova.network.neutron [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Refreshing network info cache for port 990e9014-0a5e-465f-8306-404937c589e0 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 969.571020] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:14:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '365ac5b1-6d83-4dfe-887f-60574d7f6124', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '990e9014-0a5e-465f-8306-404937c589e0', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 969.579056] env[62923]: DEBUG oslo.service.loopingcall [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 
tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 969.581808] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 969.582338] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bef837b5-27e1-44ca-93e6-a25056cd8b07 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.605641] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 969.605641] env[62923]: value = "task-1370285" [ 969.605641] env[62923]: _type = "Task" [ 969.605641] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.616796] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370285, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.729657] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9356e78-a7d3-4387-845e-b0e201e28f28 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.738079] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebaf7246-ba25-4a12-b7ae-111b72861b5d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.774130] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9c2cb3-2394-4f52-9b9b-da2a530e55ed {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.785323] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09f1b00-ef68-4d0f-9710-d4900a56c7fc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.799981] env[62923]: DEBUG nova.compute.provider_tree [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.819400] env[62923]: DEBUG nova.network.neutron [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updated VIF entry in instance network info cache for port 990e9014-0a5e-465f-8306-404937c589e0. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 969.819749] env[62923]: DEBUG nova.network.neutron [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance_info_cache with network_info: [{"id": "990e9014-0a5e-465f-8306-404937c589e0", "address": "fa:16:3e:1f:14:02", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap990e9014-0a", "ovs_interfaceid": "990e9014-0a5e-465f-8306-404937c589e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.862639] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "2a9a93f8-9398-4a19-a149-a1092ceb416d" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.862954] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "2a9a93f8-9398-4a19-a149-a1092ceb416d" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.863167] env[62923]: DEBUG nova.compute.manager [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Going to confirm migration 2 {{(pid=62923) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 969.873750] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.919812] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370284, 'name': CloneVM_Task} progress is 94%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
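The recurring "Task: {...} progress is N%." records come from a fixed-interval poll of the vCenter task until it leaves the running state, after which a "completed successfully" record carries duration_secs. A minimal stdlib sketch of that poll-until-done pattern (illustrative only; fetch_task_info is a hypothetical stub, not oslo.vmware's API):

```python
import time

def fetch_task_info(task_id, _state={"progress": 0}):
    """Hypothetical stub standing in for reading a vCenter TaskInfo object."""
    _state["progress"] = min(100, _state["progress"] + 47)
    state = "success" if _state["progress"] >= 100 else "running"
    return {"id": task_id, "state": state, "progress": _state["progress"]}

def wait_for_task(task_id, interval=0.5):
    """Poll until the task succeeds or errors, logging progress like _poll_task."""
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        if info["state"] == "running":
            print(f"Task: {{'id': {info['id']}}} progress is {info['progress']}%.")
            time.sleep(interval)
            continue
        if info["state"] == "success":
            duration = time.monotonic() - start
            print(f"Task: {{'id': {info['id']}}} completed successfully "
                  f"in {duration:.3f}s.")
            return info
        raise RuntimeError(f"Task {task_id} failed: {info}")

wait_for_task("task-1370284")
```

Polling rather than blocking in the SOAP call is what lets one worker interleave many long-running vCenter tasks, which is why several tasks advance concurrently throughout this trace.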
[ 970.118094] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370285, 'name': CreateVM_Task, 'duration_secs': 0.37132} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.118094] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 970.118094] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.118094] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.118094] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 970.118094] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b17bd316-db4d-4709-a290-a1d3b0ed8a3a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.122936] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 970.122936] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5281d70a-358e-7166-7ced-5e76ae643b55" [ 970.122936] env[62923]: _type = "Task" [ 970.122936] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.131141] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5281d70a-358e-7166-7ced-5e76ae643b55, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.304073] env[62923]: DEBUG nova.scheduler.client.report [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 970.322521] env[62923]: DEBUG oslo_concurrency.lockutils [req-f22493db-2958-4cf4-b3c7-d622a240bd40 req-fa040ab7-756c-4c27-89e9-750bbd84e90d service nova] Releasing lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.420891] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370284, 'name': CloneVM_Task, 'duration_secs': 1.387059} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.421189] env[62923]: INFO nova.virt.vmwareapi.vmops [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Created linked-clone VM from snapshot [ 970.421930] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57b281b-70af-43f1-a8a9-f6a8d7bb8fc9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.429793] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Uploading image abb930ad-57f7-4305-8712-85e295a8f519 {{(pid=62923) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 970.442118] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Destroying the VM {{(pid=62923) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 970.442673] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6defa154-cd38-4163-bfdb-3e848431b219 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.449739] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 970.449739] env[62923]: value = "task-1370286" [ 970.449739] env[62923]: _type = "Task" [ 970.449739] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.454743] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.454951] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.455139] env[62923]: DEBUG nova.network.neutron [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 970.455370] env[62923]: DEBUG nova.objects.instance [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lazy-loading 'info_cache' on Instance uuid 2a9a93f8-9398-4a19-a149-a1092ceb416d {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.461328] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370286, 'name': Destroy_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.635698] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5281d70a-358e-7166-7ced-5e76ae643b55, 'name': SearchDatastore_Task, 'duration_secs': 0.041751} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.636151] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.636438] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 970.636704] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.636863] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.637081] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 970.637368] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7b16d74-098d-44e8-83fd-4b6430856328 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.647545] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 970.647745] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
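The records above show the datastore image-cache handshake: take the lock for the cached cd84cf13-...vmdk, search the datastore, and create devstack-image-cache_base if it is missing; the records that follow copy the cached VMDK into the instance directory and extend it. A compact sketch of that cache-then-copy control flow (illustrative only; local filesystem calls stand in for the datastore operations, and every name here is hypothetical rather than nova's API):

```python
import shutil
from pathlib import Path

CACHE = Path("/tmp/devstack-image-cache_base")  # stand-in for the [datastore1] cache dir

def fetch_image_if_missing(image_id: str, fetch) -> Path:
    """Download an image into the shared cache only on first use."""
    CACHE.mkdir(parents=True, exist_ok=True)   # 'Creating directory with path ...'
    cached = CACHE / f"{image_id}.vmdk"
    if not cached.exists():                    # the SearchDatastore_Task equivalent
        fetch(cached)                          # one-time download into the cache
    return cached

def clone_root_disk(image_id: str, instance_uuid: str, fetch) -> Path:
    """Copy the cached base image to a per-instance root disk."""
    cached = fetch_image_if_missing(image_id, fetch)
    inst_dir = Path("/tmp") / instance_uuid
    inst_dir.mkdir(parents=True, exist_ok=True)
    root = inst_dir / f"{instance_uuid}.vmdk"
    shutil.copyfile(cached, root)              # the CopyVirtualDisk_Task equivalent
    return root

disk = clone_root_disk(
    "cd84cf13-77b9-4bc1-bb15-31bece605a8e",
    "92a10f0a-4bfd-405a-956e-3ea29a740b28",
    fetch=lambda p: p.write_bytes(b"\0" * 1024),  # fake image payload
)
print(disk)
```

Holding the cache lock across the exists-check and the download is what prevents two concurrent spawns of the same image from both fetching it, which is why the lock/semaphore pair brackets the SearchDatastore_Task above.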
[ 970.648498] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51fa4cf5-a98f-425d-9899-470ebaae2b3a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.654133] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 970.654133] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5243a8bd-3b6b-4a7f-f4cb-71645c7f252b" [ 970.654133] env[62923]: _type = "Task" [ 970.654133] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.662037] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5243a8bd-3b6b-4a7f-f4cb-71645c7f252b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.809849] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.810454] env[62923]: DEBUG nova.compute.manager [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 970.813620] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.459s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.813620] env[62923]: DEBUG nova.objects.instance [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lazy-loading 'resources' on Instance uuid eaa654f9-023d-4514-930d-6bebd421325a {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.960642] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370286, 'name': Destroy_Task} progress is 33%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.165043] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5243a8bd-3b6b-4a7f-f4cb-71645c7f252b, 'name': SearchDatastore_Task, 'duration_secs': 0.009331} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.165796] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03593523-4173-4e0e-aee4-692a08ba36ac {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.171122] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 971.171122] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5212cc59-d11e-5951-18e5-6e6dbf1f9947" [ 971.171122] env[62923]: _type = "Task" [ 971.171122] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.178955] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5212cc59-d11e-5951-18e5-6e6dbf1f9947, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.316457] env[62923]: DEBUG nova.compute.utils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 971.318351] env[62923]: DEBUG nova.compute.manager [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 971.318351] env[62923]: DEBUG nova.network.neutron [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 971.366302] env[62923]: DEBUG nova.policy [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '182e1b6f26ed401da24d07a85f993802', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '011a5ec25af44f92961be00f82c10c08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 971.462010] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370286, 'name': Destroy_Task, 'duration_secs': 0.619397} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.462231] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Destroyed the VM [ 971.462467] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Deleting Snapshot of the VM instance {{(pid=62923) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 971.462760] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bb15adf5-a45a-4178-845b-d644220af0b3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.473212] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 971.473212] env[62923]: value = "task-1370287" [ 971.473212] env[62923]: _type = "Task" [ 971.473212] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.485513] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370287, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.489905] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.490315] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.527584] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7c1731-3412-44d9-a644-13a576a33a42 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.536507] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab14933a-38b2-446c-8225-2e2498afca81 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.580201] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593de0be-8d3f-4b3b-8bc1-97c7ca77ac29 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.588834] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd130df-9399-407f-9c23-7f7695dbb220 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.603929] env[62923]: DEBUG nova.compute.provider_tree [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.634557] env[62923]: DEBUG nova.network.neutron [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Successfully created port: fd1dcc11-df32-408e-9548-4faf2556a924 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 971.682754] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5212cc59-d11e-5951-18e5-6e6dbf1f9947, 'name': SearchDatastore_Task, 'duration_secs': 0.010222} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.683049] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.683817] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 92a10f0a-4bfd-405a-956e-3ea29a740b28/92a10f0a-4bfd-405a-956e-3ea29a740b28.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 971.684137] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a0b55a68-857d-4316-8f7b-16ff05880976 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.692514] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 971.692514] env[62923]: value = "task-1370288" [ 971.692514] env[62923]: _type = "Task" [ 971.692514] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.701511] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370288, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.804443] env[62923]: DEBUG nova.network.neutron [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating instance_info_cache with network_info: [{"id": "422a6526-df54-4c7f-a43c-01c8902e1fb8", "address": "fa:16:3e:3e:65:69", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap422a6526-df", "ovs_interfaceid": "422a6526-df54-4c7f-a43c-01c8902e1fb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.823447] env[62923]: DEBUG nova.compute.manager [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 971.988698] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370287, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.998988] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.999270] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Starting heal instance info cache {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 971.999379] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Rebuilding the list of instances to heal {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 972.109590] env[62923]: DEBUG nova.scheduler.client.report [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 972.202839] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370288, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474535} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.203155] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 92a10f0a-4bfd-405a-956e-3ea29a740b28/92a10f0a-4bfd-405a-956e-3ea29a740b28.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 972.204039] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 972.204039] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dacd5289-c461-4a6c-8812-1cce37169227 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.211715] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 972.211715] env[62923]: value = "task-1370289" [ 972.211715] env[62923]: _type = "Task" [ 972.211715] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.220320] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370289, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.307756] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "refresh_cache-2a9a93f8-9398-4a19-a149-a1092ceb416d" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.308144] env[62923]: DEBUG nova.objects.instance [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lazy-loading 'migration_context' on Instance uuid 2a9a93f8-9398-4a19-a149-a1092ceb416d {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.487830] env[62923]: DEBUG oslo_vmware.api [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370287, 'name': RemoveSnapshot_Task, 'duration_secs': 0.803953} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.488162] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Deleted Snapshot of the VM instance {{(pid=62923) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 972.508135] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 972.508312] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Skipping network cache update for instance because it is Building. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 972.545983] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.546157] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquired lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.546308] env[62923]: DEBUG nova.network.neutron [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Forcefully refreshing network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 972.546461] env[62923]: DEBUG nova.objects.instance [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lazy-loading 'info_cache' on Instance uuid 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.615581] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.802s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.618463] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.051s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.618569] env[62923]: DEBUG nova.objects.instance [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lazy-loading 'resources' on Instance uuid f52f5912-d6e8-4da5-ac39-65bb065b6555 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.638069] env[62923]: INFO nova.scheduler.client.report [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 
tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleted allocations for instance eaa654f9-023d-4514-930d-6bebd421325a [ 972.721739] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370289, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078375} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.722670] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 972.723465] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c7c81a-babe-4764-a59c-0532e67f8d52 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.746076] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 92a10f0a-4bfd-405a-956e-3ea29a740b28/92a10f0a-4bfd-405a-956e-3ea29a740b28.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 972.746579] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6031c8b-c380-4661-805f-168ef475439d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.767657] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 972.767657] env[62923]: value = "task-1370290" [ 972.767657] env[62923]: _type = "Task" [ 972.767657] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.775873] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370290, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.811026] env[62923]: DEBUG nova.objects.base [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Object Instance<2a9a93f8-9398-4a19-a149-a1092ceb416d> lazy-loaded attributes: info_cache,migration_context {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 972.811585] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa988b2-7779-4047-b597-b898370217c6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.831871] env[62923]: DEBUG nova.compute.manager [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 972.833780] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f01d037-2f22-45c4-ba6f-50772462fb06 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.840737] env[62923]: DEBUG oslo_vmware.api [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 972.840737] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f1a718-9797-7714-ed45-1280c336ee88" [ 972.840737] env[62923]: _type = "Task" [ 972.840737] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.849517] env[62923]: DEBUG oslo_vmware.api [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f1a718-9797-7714-ed45-1280c336ee88, 'name': SearchDatastore_Task, 'duration_secs': 0.006941} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.849787] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.859391] env[62923]: DEBUG nova.virt.hardware [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 972.859670] env[62923]: DEBUG nova.virt.hardware [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 972.859878] env[62923]: DEBUG nova.virt.hardware [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 972.860131] env[62923]: DEBUG nova.virt.hardware [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 972.860331] env[62923]: DEBUG nova.virt.hardware [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 972.860525] env[62923]: DEBUG nova.virt.hardware [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 972.860780] env[62923]: DEBUG nova.virt.hardware [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 972.861062] env[62923]: DEBUG nova.virt.hardware [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 972.861367] env[62923]: DEBUG nova.virt.hardware [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 972.861593] env[62923]: DEBUG nova.virt.hardware [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 972.861782] env[62923]: DEBUG nova.virt.hardware [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 972.862615] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9a9fbf-4714-431a-b75f-0fc39875220f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.870766] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6965fe6e-3a9f-4fd3-90d9-b542d3619c2e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.993256] env[62923]: WARNING nova.compute.manager [None req-1e5dd9c3-5b39-4402-ba45-9c0a8ca5f7fe tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Image not found during snapshot: nova.exception.ImageNotFound: Image abb930ad-57f7-4305-8712-85e295a8f519 could not be found. 
[ 973.033278] env[62923]: DEBUG nova.compute.manager [req-8e431b42-62cb-4075-8ad6-565c0ce252c4 req-c30750ae-a00d-4249-b9f9-519755c93576 service nova] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Received event network-vif-plugged-fd1dcc11-df32-408e-9548-4faf2556a924 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 973.033516] env[62923]: DEBUG oslo_concurrency.lockutils [req-8e431b42-62cb-4075-8ad6-565c0ce252c4 req-c30750ae-a00d-4249-b9f9-519755c93576 service nova] Acquiring lock "1b155391-37d9-4186-b70d-84f2dec5af82-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.033773] env[62923]: DEBUG oslo_concurrency.lockutils [req-8e431b42-62cb-4075-8ad6-565c0ce252c4 req-c30750ae-a00d-4249-b9f9-519755c93576 service nova] Lock "1b155391-37d9-4186-b70d-84f2dec5af82-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.034065] env[62923]: DEBUG oslo_concurrency.lockutils [req-8e431b42-62cb-4075-8ad6-565c0ce252c4 req-c30750ae-a00d-4249-b9f9-519755c93576 service nova] Lock "1b155391-37d9-4186-b70d-84f2dec5af82-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.034193] env[62923]: DEBUG nova.compute.manager [req-8e431b42-62cb-4075-8ad6-565c0ce252c4 req-c30750ae-a00d-4249-b9f9-519755c93576 service nova] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] No waiting events found dispatching network-vif-plugged-fd1dcc11-df32-408e-9548-4faf2556a924 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 973.034306] env[62923]: WARNING nova.compute.manager [req-8e431b42-62cb-4075-8ad6-565c0ce252c4 req-c30750ae-a00d-4249-b9f9-519755c93576 service nova] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Received unexpected event network-vif-plugged-fd1dcc11-df32-408e-9548-4faf2556a924 for instance with vm_state building and task_state spawning. [ 973.121079] env[62923]: DEBUG nova.network.neutron [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Successfully updated port: fd1dcc11-df32-408e-9548-4faf2556a924 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 973.147174] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2246697e-d79b-4c9e-a0a5-7bbde6e18776 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "eaa654f9-023d-4514-930d-6bebd421325a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 10.210s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.281613] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370290, 'name': ReconfigVM_Task} progress is 99%.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.307077] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51cc02b6-6931-44f3-91a0-972dc9319874 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.315052] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ccd111-3cfb-44a7-b0a9-33dcdc07fe48 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.345802] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916dce9e-ae14-4b46-9a8a-1adf8a63c1e7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.353420] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f123dca-25c7-4333-b312-8ded9212bb0e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.367908] env[62923]: DEBUG nova.compute.provider_tree [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.626360] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-1b155391-37d9-4186-b70d-84f2dec5af82" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.626360] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-1b155391-37d9-4186-b70d-84f2dec5af82" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.626360] env[62923]: DEBUG nova.network.neutron [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 973.698227] env[62923]: DEBUG oslo_concurrency.lockutils [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "906da59a-24ac-4486-a835-62d3f81d3683" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.698227] env[62923]: DEBUG oslo_concurrency.lockutils [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "906da59a-24ac-4486-a835-62d3f81d3683" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62923) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.698227] env[62923]: DEBUG oslo_concurrency.lockutils [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "906da59a-24ac-4486-a835-62d3f81d3683-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.698227] env[62923]: DEBUG oslo_concurrency.lockutils [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "906da59a-24ac-4486-a835-62d3f81d3683-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.698227] env[62923]: DEBUG oslo_concurrency.lockutils [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "906da59a-24ac-4486-a835-62d3f81d3683-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.700106] env[62923]: INFO nova.compute.manager [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Terminating instance [ 973.702205] env[62923]: DEBUG nova.compute.manager [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Start destroying the instance on the hypervisor.
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 973.702402] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.703266] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97cc582-24f1-4250-9236-f45109fd43c4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.712374] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.712622] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e693a29-fc6e-4d9d-b063-fabcfbc1f3b6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.719423] env[62923]: DEBUG oslo_vmware.api [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 973.719423] env[62923]: value = "task-1370291" [ 973.719423] env[62923]: _type = "Task" [ 973.719423] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.728273] env[62923]: DEBUG oslo_vmware.api [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370291, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.781021] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370290, 'name': ReconfigVM_Task, 'duration_secs': 0.517065} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.781316] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 92a10f0a-4bfd-405a-956e-3ea29a740b28/92a10f0a-4bfd-405a-956e-3ea29a740b28.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.781957] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd6a452a-b60d-4767-b883-ca48d098013a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.789181] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 973.789181] env[62923]: value = "task-1370292" [ 973.789181] env[62923]: _type = "Task" [ 973.789181] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.797706] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370292, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.871017] env[62923]: DEBUG nova.scheduler.client.report [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 974.172393] env[62923]: DEBUG nova.network.neutron [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 974.230909] env[62923]: DEBUG oslo_vmware.api [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370291, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.299912] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370292, 'name': Rename_Task, 'duration_secs': 0.14273} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.300325] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 974.300607] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfd452bc-cc95-4601-bbc9-7026f089a8f2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.308686] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 974.308686] env[62923]: value = "task-1370293" [ 974.308686] env[62923]: _type = "Task" [ 974.308686] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.317900] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370293, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.358270] env[62923]: DEBUG nova.network.neutron [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance_info_cache with network_info: [{"id": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "address": "fa:16:3e:38:95:72", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb7d101-34", "ovs_interfaceid": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.376088] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.758s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.379190] 
env[62923]: DEBUG oslo_concurrency.lockutils [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.108s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.379190] env[62923]: DEBUG nova.objects.instance [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Lazy-loading 'resources' on Instance uuid 65000ac5-1c28-4abe-bc96-c440f0b14d3d {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 974.405809] env[62923]: DEBUG nova.network.neutron [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Updating instance_info_cache with network_info: [{"id": "fd1dcc11-df32-408e-9548-4faf2556a924", "address": "fa:16:3e:fc:0e:20", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd1dcc11-df", "ovs_interfaceid": "fd1dcc11-df32-408e-9548-4faf2556a924", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.422625] env[62923]: INFO nova.scheduler.client.report [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Deleted allocations for instance f52f5912-d6e8-4da5-ac39-65bb065b6555 [ 974.736544] env[62923]: DEBUG oslo_vmware.api [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370291, 'name': PowerOffVM_Task, 'duration_secs': 0.708323} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.736905] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 974.737065] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 974.737348] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba32b10e-4698-4c44-9984-4ec4ea74b44f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.825919] env[62923]: DEBUG oslo_vmware.api [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370293, 'name': PowerOnVM_Task, 'duration_secs': 0.453892} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.827382] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 974.827709] env[62923]: INFO nova.compute.manager [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Took 7.54 seconds to spawn the instance on the hypervisor. 
[ 974.827985] env[62923]: DEBUG nova.compute.manager [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 974.828327] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 974.828520] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 974.828699] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleting the datastore file [datastore1] 906da59a-24ac-4486-a835-62d3f81d3683 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 974.829529] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36fead70-6e26-423e-8592-e68f3f0a88ef {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.832133] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6fe5c3a-257e-42b9-b356-f1e881f9d429 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.846057] env[62923]: DEBUG oslo_vmware.api [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for the task: (returnval){ [ 974.846057] env[62923]: value = "task-1370295" [ 974.846057] env[62923]: _type = "Task" [ 974.846057] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.854266] env[62923]: DEBUG oslo_vmware.api [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370295, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.861036] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Releasing lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.862146] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updated the network info_cache for instance {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 974.862146] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.862146] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.862338] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.862519] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.863925] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.863925] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.863925] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62923) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 974.863925] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.911014] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-1b155391-37d9-4186-b70d-84f2dec5af82" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.911014] env[62923]: DEBUG nova.compute.manager [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Instance network_info: |[{"id": "fd1dcc11-df32-408e-9548-4faf2556a924", "address": "fa:16:3e:fc:0e:20", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd1dcc11-df", "ovs_interfaceid": "fd1dcc11-df32-408e-9548-4faf2556a924", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 974.911014] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:0e:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4c7a041-8e34-47f9-8ea1-d2f29414fd9d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd1dcc11-df32-408e-9548-4faf2556a924', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 974.916940] env[62923]: DEBUG oslo.service.loopingcall [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 974.920693] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 974.921372] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d755be5c-6c61-4de7-bfbc-0b99d6d3381a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.944023] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d43676c-7914-4ca8-9e4a-3eb3b4bfd73c tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "f52f5912-d6e8-4da5-ac39-65bb065b6555" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 9.882s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.950619] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 974.950619] env[62923]: value = "task-1370296" [ 974.950619] env[62923]: _type = "Task" [ 974.950619] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.963775] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370296, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.063786] env[62923]: DEBUG nova.compute.manager [req-1984d4d8-0fa2-4fea-bb14-0efc56de6b64 req-319b40d3-c29b-40b6-9aea-b0192ff56cb0 service nova] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Received event network-changed-fd1dcc11-df32-408e-9548-4faf2556a924 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 975.064124] env[62923]: DEBUG nova.compute.manager [req-1984d4d8-0fa2-4fea-bb14-0efc56de6b64 req-319b40d3-c29b-40b6-9aea-b0192ff56cb0 service nova] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Refreshing instance network info cache due to event network-changed-fd1dcc11-df32-408e-9548-4faf2556a924.
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 975.064124] env[62923]: DEBUG oslo_concurrency.lockutils [req-1984d4d8-0fa2-4fea-bb14-0efc56de6b64 req-319b40d3-c29b-40b6-9aea-b0192ff56cb0 service nova] Acquiring lock "refresh_cache-1b155391-37d9-4186-b70d-84f2dec5af82" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.064541] env[62923]: DEBUG oslo_concurrency.lockutils [req-1984d4d8-0fa2-4fea-bb14-0efc56de6b64 req-319b40d3-c29b-40b6-9aea-b0192ff56cb0 service nova] Acquired lock "refresh_cache-1b155391-37d9-4186-b70d-84f2dec5af82" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.064541] env[62923]: DEBUG nova.network.neutron [req-1984d4d8-0fa2-4fea-bb14-0efc56de6b64 req-319b40d3-c29b-40b6-9aea-b0192ff56cb0 service nova] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Refreshing network info cache for port fd1dcc11-df32-408e-9548-4faf2556a924 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 975.083319] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848c549a-7ede-4e95-9500-4b83c478987d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.092935] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d34986-fd40-45c9-a8c1-ee91e1a51378 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.127851] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ab7fda-4039-4c39-ad9c-0061bfe3f636 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.135939] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c31e32b-bfaf-4f34-8718-b5a122fe3e7b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.151201] env[62923]: DEBUG oslo_concurrency.lockutils [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.151407] env[62923]: DEBUG oslo_concurrency.lockutils [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.151626] env[62923]: DEBUG oslo_concurrency.lockutils [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.151811] env[62923]: DEBUG oslo_concurrency.lockutils [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.151995] env[62923]: DEBUG oslo_concurrency.lockutils [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.153917] env[62923]: DEBUG nova.compute.provider_tree [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.155378] env[62923]: INFO nova.compute.manager [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Terminating instance [ 975.157962] env[62923]: DEBUG nova.compute.manager [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Start destroying the instance on the hypervisor.
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 975.157962] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 975.158406] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd15073-b273-48ee-8f4b-7ef46336a6f6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.167917] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 975.168195] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bdd7a409-e32c-4c15-bc4d-ae51c2bb4432 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.177028] env[62923]: DEBUG oslo_vmware.api [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 975.177028] env[62923]: value = "task-1370297" [ 975.177028] env[62923]: _type = "Task" [ 975.177028] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.187246] env[62923]: DEBUG oslo_vmware.api [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370297, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.353860] env[62923]: INFO nova.compute.manager [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Took 12.41 seconds to build instance. [ 975.358852] env[62923]: DEBUG oslo_vmware.api [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Task: {'id': task-1370295, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.277241} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.359959] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 975.359959] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 975.359959] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 975.360226] env[62923]: INFO nova.compute.manager [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Took 1.66 seconds to destroy the instance on the hypervisor. [ 975.360358] env[62923]: DEBUG oslo.service.loopingcall [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 975.360549] env[62923]: DEBUG nova.compute.manager [-] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 975.360644] env[62923]: DEBUG nova.network.neutron [-] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 975.367490] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.461914] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370296, 'name': CreateVM_Task, 'duration_secs': 0.381435} completed successfully.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.461914] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 975.462659] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.462847] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.463166] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 975.463438] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd7f978c-8b03-4bc7-88bc-8a3531428d16 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.468729] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 975.468729] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527ef166-f5c1-fa0a-78b6-ef6b6f7dba4a" [ 975.468729] env[62923]: _type = "Task" [ 975.468729] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.477197] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527ef166-f5c1-fa0a-78b6-ef6b6f7dba4a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.657934] env[62923]: DEBUG nova.scheduler.client.report [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 975.687145] env[62923]: DEBUG oslo_vmware.api [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370297, 'name': PowerOffVM_Task, 'duration_secs': 0.186315} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.687454] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 975.687626] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 975.687885] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-420b70fc-61df-4fbd-8fb2-fc780d695f06 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.771055] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 975.771436] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 975.771654] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Deleting the datastore file [datastore1] 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 975.771944] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc6d7515-3172-4679-ab70-6ae6dfdc5e70 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.778324] env[62923]: DEBUG nova.network.neutron [req-1984d4d8-0fa2-4fea-bb14-0efc56de6b64 req-319b40d3-c29b-40b6-9aea-b0192ff56cb0 service nova] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Updated VIF entry in instance network info cache for port fd1dcc11-df32-408e-9548-4faf2556a924. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 975.778902] env[62923]: DEBUG nova.network.neutron [req-1984d4d8-0fa2-4fea-bb14-0efc56de6b64 req-319b40d3-c29b-40b6-9aea-b0192ff56cb0 service nova] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Updating instance_info_cache with network_info: [{"id": "fd1dcc11-df32-408e-9548-4faf2556a924", "address": "fa:16:3e:fc:0e:20", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd1dcc11-df", "ovs_interfaceid": "fd1dcc11-df32-408e-9548-4faf2556a924", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.781367] env[62923]: DEBUG oslo_vmware.api [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for the task: (returnval){ [ 975.781367] env[62923]: value = "task-1370299" [ 975.781367] env[62923]: _type = "Task" [ 975.781367] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.790843] env[62923]: DEBUG oslo_vmware.api [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370299, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.855993] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5fd3d626-6a36-4483-90ae-2bced474161b tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "92a10f0a-4bfd-405a-956e-3ea29a740b28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 13.936s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.989130] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527ef166-f5c1-fa0a-78b6-ef6b6f7dba4a, 'name': SearchDatastore_Task, 'duration_secs': 0.024789} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.989130] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.989130] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 975.989130] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.989130] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.989130] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 975.989130] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a06d0052-416e-4be0-a114-78b4426b28f6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.997785] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Created
directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 975.997986] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 975.998723] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd92ea83-c276-4861-bc67-036d78f543ea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.007694] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 976.007694] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]529b430e-100b-d4f8-8846-086ad6736b33" [ 976.007694] env[62923]: _type = "Task" [ 976.007694] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.010122] env[62923]: DEBUG nova.compute.manager [req-f3274e23-b403-4cd8-acf8-c5cea5708752 req-832b848d-3b17-4fc8-aae8-76c8d3100799 service nova] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Received event network-vif-deleted-4d94a3a6-5f65-455c-981e-c9aa13c739d7 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 976.010978] env[62923]: INFO nova.compute.manager [req-f3274e23-b403-4cd8-acf8-c5cea5708752 req-832b848d-3b17-4fc8-aae8-76c8d3100799 service nova] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Neutron deleted interface 4d94a3a6-5f65-455c-981e-c9aa13c739d7; detaching it from the instance and deleting it from the info cache [ 976.010978] env[62923]: DEBUG nova.network.neutron [req-f3274e23-b403-4cd8-acf8-c5cea5708752 req-832b848d-3b17-4fc8-aae8-76c8d3100799 service nova] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.024176] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]529b430e-100b-d4f8-8846-086ad6736b33, 'name': SearchDatastore_Task, 'duration_secs': 0.010376} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.024999] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c77ce3b8-7d58-4996-8895-5b8e7ed41b1c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.031871] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 976.031871] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528c6d7b-dbbb-3e5c-914e-3306f37811b3" [ 976.031871] env[62923]: _type = "Task" [ 976.031871] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.040944] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528c6d7b-dbbb-3e5c-914e-3306f37811b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.116511] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.116716] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.163544] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.785s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.165872] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.292s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.167492] env[62923]: INFO nova.compute.claims [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 976.282747] env[62923]: DEBUG oslo_concurrency.lockutils [req-1984d4d8-0fa2-4fea-bb14-0efc56de6b64 req-319b40d3-c29b-40b6-9aea-b0192ff56cb0 service nova] Releasing lock "refresh_cache-1b155391-37d9-4186-b70d-84f2dec5af82" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.292132] env[62923]: DEBUG oslo_vmware.api [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Task: {'id': task-1370299, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.360406} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.292433] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 976.292657] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 976.292889] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 976.293105] env[62923]: INFO nova.compute.manager [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 976.293388] env[62923]: DEBUG oslo.service.loopingcall [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 976.293620] env[62923]: DEBUG nova.compute.manager [-] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 976.293722] env[62923]: DEBUG nova.network.neutron [-] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 976.414146] env[62923]: DEBUG nova.network.neutron [-] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.520193] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0013c1ea-99b1-49d7-925e-2421f7310a42 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.533573] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef629ef-8126-4146-81ff-e712e37ea97e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.558153] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528c6d7b-dbbb-3e5c-914e-3306f37811b3, 'name': SearchDatastore_Task, 'duration_secs': 0.01073} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.558438] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.558699] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 1b155391-37d9-4186-b70d-84f2dec5af82/1b155391-37d9-4186-b70d-84f2dec5af82.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 976.558959] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48659b5d-9c43-4625-ac28-c05f476bb893 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.571139] env[62923]: DEBUG nova.compute.manager [req-f3274e23-b403-4cd8-acf8-c5cea5708752 req-832b848d-3b17-4fc8-aae8-76c8d3100799 service nova] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Detach interface failed, port_id=4d94a3a6-5f65-455c-981e-c9aa13c739d7, reason: Instance 906da59a-24ac-4486-a835-62d3f81d3683 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 976.573123] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 976.573123] env[62923]: value = "task-1370300" [ 976.573123] env[62923]: _type = "Task" [ 976.573123] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.582658] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370300, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.619660] env[62923]: DEBUG nova.compute.manager [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 976.685262] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2ad65a73-df74-476a-aacf-b5869e47a3d0 tempest-ServerActionsV293TestJSON-843398642 tempest-ServerActionsV293TestJSON-843398642-project-member] Lock "65000ac5-1c28-4abe-bc96-c440f0b14d3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.026s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.900776] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.901131] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.901232] env[62923]: INFO nova.compute.manager [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Shelving [ 976.915686] env[62923]: INFO nova.compute.manager [-] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Took 1.55 seconds to deallocate network for instance. [ 977.016044] env[62923]: DEBUG nova.compute.manager [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Stashing vm_state: active {{(pid=62923) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 977.086096] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370300, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.135541] env[62923]: DEBUG nova.network.neutron [-] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.147428] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.352494] env[62923]: DEBUG oslo_concurrency.lockutils [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "b145b71c-c56b-4872-bb61-fa3e65fef04f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.352767] env[62923]: DEBUG oslo_concurrency.lockutils [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "b145b71c-c56b-4872-bb61-fa3e65fef04f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.352990] env[62923]: DEBUG oslo_concurrency.lockutils [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "b145b71c-c56b-4872-bb61-fa3e65fef04f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.353187] env[62923]: DEBUG oslo_concurrency.lockutils [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "b145b71c-c56b-4872-bb61-fa3e65fef04f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.353375] env[62923]: DEBUG oslo_concurrency.lockutils [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "b145b71c-c56b-4872-bb61-fa3e65fef04f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.357607] env[62923]: INFO nova.compute.manager [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Terminating instance [ 977.359552] env[62923]: DEBUG nova.compute.manager [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 
tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 977.359747] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 977.360600] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c485de-e486-4c74-9297-1c047ebc5484 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.370026] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 977.370264] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06b7eb1f-0312-4990-b0fa-92c2fe6d1969 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.378740] env[62923]: DEBUG oslo_vmware.api [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 977.378740] env[62923]: value = "task-1370301" [ 977.378740] env[62923]: _type = "Task" [ 977.378740] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.380266] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84d91d4-3625-477c-98a5-aaa04a7d661a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.393551] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e1ab5b-89a2-47d9-b686-27e84a72900f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.396800] env[62923]: DEBUG oslo_vmware.api [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370301, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.427749] env[62923]: DEBUG oslo_concurrency.lockutils [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.430501] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae54e16-beec-4c5b-ad6b-b2b03be5dbe1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.433310] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 977.433531] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19ae9f12-daeb-4d18-bee3-857e8c49d90f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.442590] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e04163-78dc-411d-b722-9243d2a09c0c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.446568] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 977.446568] env[62923]: value = "task-1370302" [ 977.446568] env[62923]: _type = "Task" [ 977.446568] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.458463] env[62923]: DEBUG nova.compute.provider_tree [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.463119] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370302, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.537537] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.588586] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370300, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.775467} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.588586] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 1b155391-37d9-4186-b70d-84f2dec5af82/1b155391-37d9-4186-b70d-84f2dec5af82.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 977.588972] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 977.589130] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8b071e1-7239-4c33-92fd-5992b9b8935e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.597388] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 977.597388] env[62923]: value = "task-1370303" [ 977.597388] env[62923]: _type = "Task" [ 977.597388] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.611918] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370303, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.637646] env[62923]: INFO nova.compute.manager [-] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Took 1.34 seconds to deallocate network for instance. [ 977.890159] env[62923]: DEBUG oslo_vmware.api [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370301, 'name': PowerOffVM_Task, 'duration_secs': 0.205615} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.890440] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 977.890609] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 977.890858] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b126fbc-ef1c-4ca3-9878-46fa16e7be3e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.956994] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370302, 'name': PowerOffVM_Task, 'duration_secs': 0.234477} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.957387] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 977.958216] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f74fa00-53c7-4047-a195-d578f1babfd2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.976957] env[62923]: DEBUG nova.scheduler.client.report [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 977.980726] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337417e8-c6ff-45e3-b8fb-59f1c6fccea3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.009027] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 978.009027] env[62923]: DEBUG 
nova.virt.vmwareapi.vmops [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 978.009027] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Deleting the datastore file [datastore2] b145b71c-c56b-4872-bb61-fa3e65fef04f {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 978.009027] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0425e567-ec5d-4b2a-865a-3e2290972b04 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.015626] env[62923]: DEBUG oslo_vmware.api [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 978.015626] env[62923]: value = "task-1370305" [ 978.015626] env[62923]: _type = "Task" [ 978.015626] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.024274] env[62923]: DEBUG oslo_vmware.api [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.043181] env[62923]: DEBUG nova.compute.manager [req-006440a2-9e8f-4e38-89df-12cfb01599ef req-b8bfb3a8-a3d2-4919-b890-92ac94e7ab51 service nova] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Received event network-vif-deleted-cea0b4ee-b6d9-4245-b98f-c6ccd6524a2e {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 978.108119] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370303, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072232} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.108431] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 978.109254] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3115b8-5a62-4b89-908e-345a2ddd5192 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.132133] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 1b155391-37d9-4186-b70d-84f2dec5af82/1b155391-37d9-4186-b70d-84f2dec5af82.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 978.132452] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd58d964-06d6-4e57-9ee1-0bf660534ee0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.146917] env[62923]: DEBUG oslo_concurrency.lockutils [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.154107] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 978.154107] env[62923]: value = "task-1370306" [ 978.154107] env[62923]: _type = "Task" [ 978.154107] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.163294] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370306, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.484437] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.319s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.485132] env[62923]: DEBUG nova.compute.manager [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 978.488441] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 5.639s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.492899] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Creating Snapshot of the VM instance {{(pid=62923) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 978.493240] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fc486fca-fdea-47df-a844-4d8bf489dc43 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.503045] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 978.503045] env[62923]: value = "task-1370307" [ 978.503045] env[62923]: _type = "Task" [ 978.503045] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.518583] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370307, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.528413] env[62923]: DEBUG oslo_vmware.api [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.666176] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370306, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.991029] env[62923]: DEBUG nova.compute.utils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 978.991666] env[62923]: DEBUG nova.compute.manager [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 978.991829] env[62923]: DEBUG nova.network.neutron [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 979.013483] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370307, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.027713] env[62923]: DEBUG oslo_vmware.api [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.78737} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.030368] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 979.030601] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 979.030731] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 979.030901] env[62923]: INFO nova.compute.manager [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Took 1.67 seconds to destroy the instance on the hypervisor. [ 979.031158] env[62923]: DEBUG oslo.service.loopingcall [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 979.031531] env[62923]: DEBUG nova.compute.manager [-] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 979.031620] env[62923]: DEBUG nova.network.neutron [-] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 979.047160] env[62923]: DEBUG nova.policy [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e1b761abfd44661a6da62ba35ec442f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2948b6c7e6f04cf98b36777c2fc94fc1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 979.172883] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370306, 'name': ReconfigVM_Task, 'duration_secs': 0.768507} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.173576] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 1b155391-37d9-4186-b70d-84f2dec5af82/1b155391-37d9-4186-b70d-84f2dec5af82.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 979.176414] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7b66a60-792e-4cec-95b2-482d69ab0f1f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.186903] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 979.186903] env[62923]: value = "task-1370308" [ 979.186903] env[62923]: _type = "Task" [ 979.186903] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.198712] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370308, 'name': Rename_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.214897] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ca9673-b51d-4609-a7fd-24b83ecf7b7c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.223034] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0920183a-3800-403f-ac85-e78c0a2eca42 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.256616] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0490e77-d861-4dbc-a0da-af78bec121be {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.266491] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9b4b20-d8ef-4bc2-bc8f-efb42fc80aa9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.281531] env[62923]: DEBUG nova.compute.provider_tree [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.488945] env[62923]: DEBUG nova.network.neutron [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Successfully created port: 032aec56-6f4e-4f4c-8c78-8810c6ce2b07 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 979.498198] env[62923]: DEBUG nova.compute.manager [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 979.517485] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370307, 'name': CreateSnapshot_Task, 'duration_secs': 0.736917} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.517798] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Created Snapshot of the VM instance {{(pid=62923) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 979.519008] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7efee800-189e-4ca3-8d25-0ae08b4d9044 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.697609] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370308, 'name': Rename_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.707679] env[62923]: DEBUG nova.compute.manager [req-4b249d79-58e4-4f83-8231-bee9d1d8c6c0 req-0fc27e99-3d1c-4584-9ca4-b58b52b0522e service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Received event network-vif-deleted-bed590d2-cf12-4135-a164-a61cade082eb {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 979.707917] env[62923]: INFO nova.compute.manager [req-4b249d79-58e4-4f83-8231-bee9d1d8c6c0 req-0fc27e99-3d1c-4584-9ca4-b58b52b0522e service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Neutron deleted interface bed590d2-cf12-4135-a164-a61cade082eb; detaching it from the instance and deleting it from the info cache [ 979.708065] env[62923]: DEBUG nova.network.neutron [req-4b249d79-58e4-4f83-8231-bee9d1d8c6c0 req-0fc27e99-3d1c-4584-9ca4-b58b52b0522e service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.784516] env[62923]: DEBUG nova.scheduler.client.report [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 980.044515] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Creating linked-clone VM from snapshot {{(pid=62923) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 980.044515] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-680c4d00-a443-4dda-b7b5-e973db828138 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.055306] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 980.055306] env[62923]: value = "task-1370309" [ 980.055306] env[62923]: _type = "Task" [ 980.055306] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.064549] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370309, 'name': CloneVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.188017] env[62923]: DEBUG nova.network.neutron [-] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.200684] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370308, 'name': Rename_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.213024] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-784b9a8b-6c62-44aa-9f0f-66625818cbaf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.224251] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffec14d3-5686-4c57-92f6-aaf66d62cc25 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.255247] env[62923]: DEBUG nova.compute.manager [req-4b249d79-58e4-4f83-8231-bee9d1d8c6c0 req-0fc27e99-3d1c-4584-9ca4-b58b52b0522e service nova] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Detach interface failed, port_id=bed590d2-cf12-4135-a164-a61cade082eb, reason: Instance b145b71c-c56b-4872-bb61-fa3e65fef04f could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 980.511645] env[62923]: DEBUG nova.compute.manager [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 980.543395] env[62923]: DEBUG nova.virt.hardware [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 980.543674] env[62923]: DEBUG nova.virt.hardware [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 980.543882] env[62923]: DEBUG nova.virt.hardware [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 980.544087] env[62923]: DEBUG nova.virt.hardware [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 980.544249] env[62923]: DEBUG nova.virt.hardware [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 980.544398] env[62923]: DEBUG nova.virt.hardware [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 980.544605] env[62923]: DEBUG nova.virt.hardware [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 980.544767] env[62923]: DEBUG nova.virt.hardware [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 980.544935] env[62923]: DEBUG nova.virt.hardware [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Got 1 possible 
topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 980.545126] env[62923]: DEBUG nova.virt.hardware [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 980.545333] env[62923]: DEBUG nova.virt.hardware [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 980.546228] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da112aa-714f-441e-ae92-3facfb8242f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.554757] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae54f45e-00ee-4259-aafd-b4d84077e8aa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.567691] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370309, 'name': CloneVM_Task} progress is 94%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.693294] env[62923]: INFO nova.compute.manager [-] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Took 1.66 seconds to deallocate network for instance. [ 980.702341] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370308, 'name': Rename_Task} progress is 99%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.796182] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.307s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.800711] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.433s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.800897] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.801065] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62923) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 980.801377] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.654s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.803167] env[62923]: INFO nova.compute.claims [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 980.807643] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3064c37-a56a-48d4-ab67-6fe4c05ec768 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.816707] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24abb7ba-8b58-45f8-9a44-0889f0a60012 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.837092] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd34c9a2-37ba-4f61-ae97-4051ee5d3df9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.847027] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa79c15-52b3-4210-bb0f-17565e938ef1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.878933] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179568MB free_disk=147GB free_vcpus=48 pci_devices=None {{(pid=62923) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 980.879110] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.882140] env[62923]: DEBUG nova.compute.manager [req-6117486c-6e6f-4c10-9d6a-945e1b43d546 req-7055d908-f9d1-4c1d-b797-35528d503854 service nova] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Received event network-vif-plugged-032aec56-6f4e-4f4c-8c78-8810c6ce2b07 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 980.882354] env[62923]: DEBUG oslo_concurrency.lockutils [req-6117486c-6e6f-4c10-9d6a-945e1b43d546 req-7055d908-f9d1-4c1d-b797-35528d503854 service nova] Acquiring lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.882572] env[62923]: DEBUG oslo_concurrency.lockutils [req-6117486c-6e6f-4c10-9d6a-945e1b43d546 req-7055d908-f9d1-4c1d-b797-35528d503854 service nova] Lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.882807] env[62923]: DEBUG oslo_concurrency.lockutils [req-6117486c-6e6f-4c10-9d6a-945e1b43d546 req-7055d908-f9d1-4c1d-b797-35528d503854 service nova] Lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.882973] env[62923]: DEBUG nova.compute.manager [req-6117486c-6e6f-4c10-9d6a-945e1b43d546 req-7055d908-f9d1-4c1d-b797-35528d503854 service nova] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] No waiting events found dispatching network-vif-plugged-032aec56-6f4e-4f4c-8c78-8810c6ce2b07 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 980.883069] env[62923]: WARNING nova.compute.manager [req-6117486c-6e6f-4c10-9d6a-945e1b43d546 req-7055d908-f9d1-4c1d-b797-35528d503854 service nova] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Received unexpected event network-vif-plugged-032aec56-6f4e-4f4c-8c78-8810c6ce2b07 for instance with vm_state building and task_state spawning. [ 980.970526] env[62923]: DEBUG nova.network.neutron [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Successfully updated port: 032aec56-6f4e-4f4c-8c78-8810c6ce2b07 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 981.069967] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370309, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.200765] env[62923]: DEBUG oslo_concurrency.lockutils [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.201073] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370308, 'name': Rename_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.359580] env[62923]: INFO nova.scheduler.client.report [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Deleted allocation for migration 26465ae3-2214-45b4-8a00-0bc17e0a47ab [ 981.473650] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "refresh_cache-9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.475043] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "refresh_cache-9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.475043] env[62923]: DEBUG nova.network.neutron [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.577037] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370309, 'name': CloneVM_Task, 'duration_secs': 1.439787} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.577199] env[62923]: INFO nova.virt.vmwareapi.vmops [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Created linked-clone VM from snapshot [ 981.578207] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd255afd-ccac-458d-bbbd-f978d1020b56 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.586987] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Uploading image 51961a0b-7b31-4a1e-b2f1-1914040134b5 {{(pid=62923) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 981.611738] env[62923]: DEBUG oslo_vmware.rw_handles [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 981.611738] env[62923]: value = "vm-291514" [ 981.611738] env[62923]: _type = "VirtualMachine" [ 981.611738] env[62923]: }. {{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 981.612016] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0cdda5c5-abdb-4890-b6a0-4edf7646a4c4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.619801] env[62923]: DEBUG oslo_vmware.rw_handles [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lease: (returnval){ [ 981.619801] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52faeadb-623a-b9c4-166d-46447017a5f4" [ 981.619801] env[62923]: _type = "HttpNfcLease" [ 981.619801] env[62923]: } obtained for exporting VM: (result){ [ 981.619801] env[62923]: value = "vm-291514" [ 981.619801] env[62923]: _type = "VirtualMachine" [ 981.619801] env[62923]: }. {{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 981.620139] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the lease: (returnval){ [ 981.620139] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52faeadb-623a-b9c4-166d-46447017a5f4" [ 981.620139] env[62923]: _type = "HttpNfcLease" [ 981.620139] env[62923]: } to be ready. {{(pid=62923) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 981.627586] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 981.627586] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52faeadb-623a-b9c4-166d-46447017a5f4" [ 981.627586] env[62923]: _type = "HttpNfcLease" [ 981.627586] env[62923]: } is initializing. 
{{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 981.701522] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370308, 'name': Rename_Task, 'duration_secs': 2.026005} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.701838] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 981.702109] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17f76a98-047a-43ad-8825-1c2ef4d1a993 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.709710] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 981.709710] env[62923]: value = "task-1370311" [ 981.709710] env[62923]: _type = "Task" [ 981.709710] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.720876] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370311, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.867835] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d73c171e-083f-4401-9dc9-7e6887305204 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "2a9a93f8-9398-4a19-a149-a1092ceb416d" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 12.005s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.989414] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21deff1-1aa5-4512-b9e6-69c9d617772b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.998807] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ce8ea7-038b-4e0e-a723-f00532f84496 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.031156] env[62923]: DEBUG nova.network.neutron [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 982.033543] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1f04c9-7eaa-4124-8b85-f676f5d632ee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.042139] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26f4133a-a663-4388-a35e-658b0ccd75d2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.055810] env[62923]: DEBUG oslo_concurrency.lockutils [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "2a9a93f8-9398-4a19-a149-a1092ceb416d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.056081] env[62923]: DEBUG oslo_concurrency.lockutils [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "2a9a93f8-9398-4a19-a149-a1092ceb416d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.056295] env[62923]: DEBUG oslo_concurrency.lockutils [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "2a9a93f8-9398-4a19-a149-a1092ceb416d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.056477] env[62923]: DEBUG oslo_concurrency.lockutils [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "2a9a93f8-9398-4a19-a149-a1092ceb416d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.056644] env[62923]: DEBUG oslo_concurrency.lockutils [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "2a9a93f8-9398-4a19-a149-a1092ceb416d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.058495] env[62923]: DEBUG nova.compute.provider_tree [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.061674] env[62923]: INFO nova.compute.manager [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 
2a9a93f8-9398-4a19-a149-a1092ceb416d] Terminating instance [ 982.063822] env[62923]: DEBUG nova.compute.manager [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 982.064039] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 982.065037] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9429b7-1beb-483d-bc92-9f95ff3922b5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.074867] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 982.075194] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef676aad-efe9-41ef-933d-a5dd01f76c8d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.081472] env[62923]: DEBUG oslo_vmware.api [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 982.081472] env[62923]: value = "task-1370312" [ 982.081472] env[62923]: _type = "Task" [ 982.081472] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.092107] env[62923]: DEBUG oslo_vmware.api [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370312, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.128497] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 982.128497] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52faeadb-623a-b9c4-166d-46447017a5f4" [ 982.128497] env[62923]: _type = "HttpNfcLease" [ 982.128497] env[62923]: } is ready. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 982.128922] env[62923]: DEBUG oslo_vmware.rw_handles [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 982.128922] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52faeadb-623a-b9c4-166d-46447017a5f4" [ 982.128922] env[62923]: _type = "HttpNfcLease" [ 982.128922] env[62923]: }. 
{{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 982.129751] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454cafcc-c10d-4d8f-8c0d-54de0087ab9e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.138450] env[62923]: DEBUG oslo_vmware.rw_handles [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f1af8-48ec-ebf1-9b8b-a8d837079dda/disk-0.vmdk from lease info. {{(pid=62923) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 982.138640] env[62923]: DEBUG oslo_vmware.rw_handles [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f1af8-48ec-ebf1-9b8b-a8d837079dda/disk-0.vmdk for reading. {{(pid=62923) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 982.223866] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370311, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.243344] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bdcc08f5-4c9d-413e-980c-3d6ee713e831 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.287504] env[62923]: DEBUG nova.network.neutron [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Updating instance_info_cache with network_info: [{"id": "032aec56-6f4e-4f4c-8c78-8810c6ce2b07", "address": "fa:16:3e:06:34:5d", "network": {"id": "9ed96510-533e-4ed6-bf9b-e1a401a9df79", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060581969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2948b6c7e6f04cf98b36777c2fc94fc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap032aec56-6f", "ovs_interfaceid": "032aec56-6f4e-4f4c-8c78-8810c6ce2b07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.510418] env[62923]: DEBUG oslo_concurrency.lockutils [None 
req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.510714] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.564529] env[62923]: DEBUG nova.scheduler.client.report [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 982.594939] env[62923]: DEBUG oslo_vmware.api [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370312, 'name': PowerOffVM_Task, 'duration_secs': 0.233504} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.595262] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 982.595440] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 982.595700] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1dccda96-d606-4b10-88d9-85bfe0199dc7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.671758] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 982.672321] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 982.672605] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Deleting the datastore file [datastore1] 2a9a93f8-9398-4a19-a149-a1092ceb416d {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 982.672957] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-549b0ad6-60d4-45e1-8455-6ce6f232ad76 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.682149] env[62923]: DEBUG oslo_vmware.api [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 982.682149] env[62923]: value = "task-1370314" [ 982.682149] env[62923]: _type = "Task" [ 982.682149] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.691710] env[62923]: DEBUG oslo_vmware.api [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370314, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.725759] env[62923]: DEBUG oslo_vmware.api [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370311, 'name': PowerOnVM_Task, 'duration_secs': 0.907654} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.726269] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 982.726635] env[62923]: INFO nova.compute.manager [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Took 9.89 seconds to spawn the instance on the hypervisor. [ 982.726975] env[62923]: DEBUG nova.compute.manager [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 982.728294] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f471d448-bd55-4a14-b8b5-d05a85a95b48 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.790206] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "refresh_cache-9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.790533] env[62923]: DEBUG nova.compute.manager [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Instance network_info: |[{"id": "032aec56-6f4e-4f4c-8c78-8810c6ce2b07", "address": "fa:16:3e:06:34:5d", "network": {"id": "9ed96510-533e-4ed6-bf9b-e1a401a9df79", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060581969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2948b6c7e6f04cf98b36777c2fc94fc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap032aec56-6f", "ovs_interfaceid": "032aec56-6f4e-4f4c-8c78-8810c6ce2b07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 982.791259] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:34:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ba07329-1d3e-4ba8-8774-d029262318c4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '032aec56-6f4e-4f4c-8c78-8810c6ce2b07', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 982.799430] env[62923]: DEBUG oslo.service.loopingcall [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 982.799743] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 982.800040] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb929c8a-5923-478d-83ee-6d915641d06a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.823874] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 982.823874] env[62923]: value = "task-1370315" [ 982.823874] env[62923]: _type = "Task" [ 982.823874] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.833059] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370315, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.912987] env[62923]: DEBUG nova.compute.manager [req-8294a19e-a1f7-466f-8ea8-73ca99fc578d req-40aa9936-4968-4f3b-82c0-cee4c7f812ca service nova] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Received event network-changed-032aec56-6f4e-4f4c-8c78-8810c6ce2b07 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 982.913450] env[62923]: DEBUG nova.compute.manager [req-8294a19e-a1f7-466f-8ea8-73ca99fc578d req-40aa9936-4968-4f3b-82c0-cee4c7f812ca service nova] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Refreshing instance network info cache due to event network-changed-032aec56-6f4e-4f4c-8c78-8810c6ce2b07. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 982.913820] env[62923]: DEBUG oslo_concurrency.lockutils [req-8294a19e-a1f7-466f-8ea8-73ca99fc578d req-40aa9936-4968-4f3b-82c0-cee4c7f812ca service nova] Acquiring lock "refresh_cache-9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.914025] env[62923]: DEBUG oslo_concurrency.lockutils [req-8294a19e-a1f7-466f-8ea8-73ca99fc578d req-40aa9936-4968-4f3b-82c0-cee4c7f812ca service nova] Acquired lock "refresh_cache-9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.914252] env[62923]: DEBUG nova.network.neutron [req-8294a19e-a1f7-466f-8ea8-73ca99fc578d req-40aa9936-4968-4f3b-82c0-cee4c7f812ca service nova] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Refreshing network info cache for port 032aec56-6f4e-4f4c-8c78-8810c6ce2b07 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 983.013636] env[62923]: DEBUG nova.compute.manager [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 983.071280] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.270s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.072126] env[62923]: DEBUG nova.compute.manager [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 983.075092] env[62923]: DEBUG oslo_concurrency.lockutils [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.647s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.075407] env[62923]: DEBUG nova.objects.instance [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lazy-loading 'resources' on Instance uuid 906da59a-24ac-4486-a835-62d3f81d3683 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.192521] env[62923]: DEBUG oslo_vmware.api [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370314, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291849} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.192900] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 983.193141] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 983.193354] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 983.193580] env[62923]: INFO nova.compute.manager [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 983.193869] env[62923]: DEBUG oslo.service.loopingcall [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 983.194189] env[62923]: DEBUG nova.compute.manager [-] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 983.194341] env[62923]: DEBUG nova.network.neutron [-] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 983.251918] env[62923]: INFO nova.compute.manager [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Took 18.36 seconds to build instance. [ 983.333915] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370315, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.544425] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.578966] env[62923]: DEBUG nova.compute.utils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 983.585925] env[62923]: DEBUG nova.compute.manager [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 983.586509] env[62923]: DEBUG nova.network.neutron [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 983.628966] env[62923]: DEBUG nova.policy [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '676a737149a9418498a55f83760df073', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d1cf5e642524949a8366bf54d00593e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 983.676953] env[62923]: DEBUG nova.network.neutron [req-8294a19e-a1f7-466f-8ea8-73ca99fc578d req-40aa9936-4968-4f3b-82c0-cee4c7f812ca service nova] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Updated VIF entry in instance network info cache for port 032aec56-6f4e-4f4c-8c78-8810c6ce2b07. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 983.677435] env[62923]: DEBUG nova.network.neutron [req-8294a19e-a1f7-466f-8ea8-73ca99fc578d req-40aa9936-4968-4f3b-82c0-cee4c7f812ca service nova] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Updating instance_info_cache with network_info: [{"id": "032aec56-6f4e-4f4c-8c78-8810c6ce2b07", "address": "fa:16:3e:06:34:5d", "network": {"id": "9ed96510-533e-4ed6-bf9b-e1a401a9df79", "bridge": "br-int", "label": "tempest-ServersTestJSON-1060581969-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2948b6c7e6f04cf98b36777c2fc94fc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap032aec56-6f", "ovs_interfaceid": "032aec56-6f4e-4f4c-8c78-8810c6ce2b07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.754193] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fa2f6988-f9d5-474c-a9fe-990498b64efb tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1b155391-37d9-4186-b70d-84f2dec5af82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 19.873s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.816265] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c604f6-7880-453c-944e-96ba6f3a6261 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.825212] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd4f57a-2a22-4d52-97c1-b5e1011578db {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.839849] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370315, 'name': CreateVM_Task, 'duration_secs': 0.528763} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.870567] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 983.871693] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.871976] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.872445] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 983.873215] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3554070-a2ff-42f1-8f4b-7cced37f3b31 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.875854] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abf22e91-186a-4b58-8180-9d91e5189500 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.881094] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 983.881094] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a04e50-c079-9cec-915b-d89231acebce" [ 983.881094] env[62923]: _type = "Task" [ 983.881094] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.887459] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12149c48-44e4-4b1b-b25f-271588f9203a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.896035] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a04e50-c079-9cec-915b-d89231acebce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.904762] env[62923]: DEBUG nova.compute.provider_tree [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.016995] env[62923]: DEBUG nova.network.neutron [-] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.086698] env[62923]: DEBUG nova.compute.manager [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 984.113873] env[62923]: DEBUG nova.network.neutron [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Successfully created port: 35f893d5-3b23-4350-92a9-e3803a075eb0 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 984.185440] env[62923]: DEBUG oslo_concurrency.lockutils [req-8294a19e-a1f7-466f-8ea8-73ca99fc578d req-40aa9936-4968-4f3b-82c0-cee4c7f812ca service nova] Releasing lock "refresh_cache-9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.393958] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a04e50-c079-9cec-915b-d89231acebce, 'name': SearchDatastore_Task, 'duration_secs': 0.020896} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.394465] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.394741] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 984.395083] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.395314] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.395560] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 984.395932] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22b1604b-1b6b-434d-b3d7-e29c53c7e727 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.408294] env[62923]: DEBUG nova.scheduler.client.report [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 984.415581] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 984.415735] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 
tempest-ServersTestJSON-56933000-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 984.416588] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-915aae4c-32ea-4d1b-9fa2-9fe4044033be {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.422496] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 984.422496] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52893170-63d1-48e8-6aaf-ab4ffef14450" [ 984.422496] env[62923]: _type = "Task" [ 984.422496] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.431061] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52893170-63d1-48e8-6aaf-ab4ffef14450, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.520023] env[62923]: INFO nova.compute.manager [-] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Took 1.33 seconds to deallocate network for instance. [ 984.706039] env[62923]: INFO nova.compute.manager [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Rebuilding instance [ 984.751681] env[62923]: DEBUG nova.compute.manager [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 984.752718] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b010143c-5ea2-4890-8269-c1cda6757a64 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.917223] env[62923]: DEBUG oslo_concurrency.lockutils [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.919419] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 7.382s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.933269] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52893170-63d1-48e8-6aaf-ab4ffef14450, 
'name': SearchDatastore_Task, 'duration_secs': 0.020456} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.934496] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85ce5f82-8af7-4b36-a2c8-0e33970af028 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.938290] env[62923]: INFO nova.scheduler.client.report [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Deleted allocations for instance 906da59a-24ac-4486-a835-62d3f81d3683 [ 984.940719] env[62923]: DEBUG nova.compute.manager [req-d5aebcd7-86a6-4deb-85cd-3a493c943cb0 req-0ce63c5c-6a7b-4e0b-956e-190d96eb5853 service nova] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Received event network-vif-deleted-422a6526-df54-4c7f-a43c-01c8902e1fb8 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 984.946010] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 984.946010] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5287317f-62d3-3d0e-3e66-0432e272391d" [ 984.946010] env[62923]: _type = "Task" [ 984.946010] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.954069] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5287317f-62d3-3d0e-3e66-0432e272391d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.026192] env[62923]: DEBUG oslo_concurrency.lockutils [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.097430] env[62923]: DEBUG nova.compute.manager [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 985.118459] env[62923]: DEBUG nova.virt.hardware [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 985.118725] env[62923]: DEBUG nova.virt.hardware [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 985.118882] env[62923]: DEBUG nova.virt.hardware [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 985.119155] env[62923]: DEBUG nova.virt.hardware [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 985.119326] env[62923]: DEBUG nova.virt.hardware [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 985.119475] env[62923]: DEBUG nova.virt.hardware [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 985.119683] env[62923]: DEBUG nova.virt.hardware [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 985.119844] env[62923]: DEBUG nova.virt.hardware [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 985.120032] env[62923]: DEBUG nova.virt.hardware [None 
req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 985.120191] env[62923]: DEBUG nova.virt.hardware [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 985.120375] env[62923]: DEBUG nova.virt.hardware [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 985.121259] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba34035-f488-48e6-90fd-a9ebc5cacd24 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.129844] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5397def-1488-4c1b-9d0d-b3dde9219e74 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.265766] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 985.266138] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b877252-a861-4ab6-8aba-e9d784ea5d3b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.273813] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 985.273813] env[62923]: value = "task-1370316" [ 985.273813] env[62923]: _type = "Task" [ 985.273813] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.282459] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370316, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.424935] env[62923]: INFO nova.compute.claims [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 985.453225] env[62923]: DEBUG oslo_concurrency.lockutils [None req-96a4909b-f49f-4cf2-84fc-03274ff56015 tempest-ImagesTestJSON-863450638 tempest-ImagesTestJSON-863450638-project-member] Lock "906da59a-24ac-4486-a835-62d3f81d3683" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.756s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.461601] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5287317f-62d3-3d0e-3e66-0432e272391d, 'name': SearchDatastore_Task, 'duration_secs': 0.010491} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.461801] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.462089] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f/9f0b13d8-eb25-474c-b9bb-80ee9dd4955f.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 985.462360] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89d5fdc6-878e-4f09-9ce5-857df784bab9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.471751] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 985.471751] env[62923]: value = "task-1370317" [ 985.471751] env[62923]: _type = "Task" [ 985.471751] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.480226] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370317, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.787500] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370316, 'name': PowerOffVM_Task, 'duration_secs': 0.219081} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.787992] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 985.788557] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 985.789591] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba00004-fbff-4e30-8db2-dd12e5e3cceb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.800096] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 985.800617] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c40534bd-5073-44b7-aa0c-d544fe7d66cb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.871852] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 985.872265] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 985.872557] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleting the datastore file [datastore2] 1b155391-37d9-4186-b70d-84f2dec5af82 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 985.872889] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-825d13fe-08d1-42cf-9980-68264fb3a9ff {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.883148] env[62923]: DEBUG oslo_vmware.api [None 
req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 985.883148] env[62923]: value = "task-1370319" [ 985.883148] env[62923]: _type = "Task" [ 985.883148] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.891704] env[62923]: DEBUG nova.network.neutron [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Successfully updated port: 35f893d5-3b23-4350-92a9-e3803a075eb0 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 985.901578] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370319, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.932013] env[62923]: INFO nova.compute.resource_tracker [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating resource usage from migration 73716037-3a40-4904-be0e-5a06f0bc0a28 [ 985.984276] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370317, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.124211] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e0c902-7b63-4618-8c4c-4fe156fc0c0b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.132280] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e8b20d-cc58-4eef-9f6b-187d9f96371e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.167522] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee36b97-1a85-4957-963b-f20a746db8d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.175668] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00bdb6d-89c3-451e-91cf-f793c5e00973 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.191933] env[62923]: DEBUG nova.compute.provider_tree [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.394048] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370319, 'name': 
DeleteDatastoreFile_Task, 'duration_secs': 0.196513} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.394321] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 986.394511] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 986.394687] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 986.399497] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "refresh_cache-b459a438-c287-4fbd-80f5-b5d3c31b83c9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.399497] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "refresh_cache-b459a438-c287-4fbd-80f5-b5d3c31b83c9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.399497] env[62923]: DEBUG nova.network.neutron [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 986.484257] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370317, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.695090] env[62923]: DEBUG nova.scheduler.client.report [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 986.984167] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370317, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.490563} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.984459] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f/9f0b13d8-eb25-474c-b9bb-80ee9dd4955f.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 986.984680] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 986.985046] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c8487df8-7b13-4479-86a3-2a946f8c7391 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.991970] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 986.991970] env[62923]: value = "task-1370320" [ 986.991970] env[62923]: _type = "Task" [ 986.991970] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.000755] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370320, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.103163] env[62923]: DEBUG nova.network.neutron [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 987.205658] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.285s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.206079] env[62923]: INFO nova.compute.manager [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Migrating [ 987.212752] env[62923]: DEBUG oslo_concurrency.lockutils [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.066s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.213131] env[62923]: DEBUG nova.objects.instance [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lazy-loading 'resources' on Instance uuid 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 987.231391] env[62923]: DEBUG nova.compute.manager [req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Received event network-vif-plugged-35f893d5-3b23-4350-92a9-e3803a075eb0 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 987.231391] env[62923]: DEBUG oslo_concurrency.lockutils [req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] Acquiring lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.231391] env[62923]: DEBUG oslo_concurrency.lockutils [req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.231391] env[62923]: DEBUG oslo_concurrency.lockutils [req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.231391] env[62923]: DEBUG nova.compute.manager [req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] No waiting events found dispatching network-vif-plugged-35f893d5-3b23-4350-92a9-e3803a075eb0 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 987.231391] env[62923]: WARNING nova.compute.manager 
[req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Received unexpected event network-vif-plugged-35f893d5-3b23-4350-92a9-e3803a075eb0 for instance with vm_state building and task_state spawning. [ 987.231391] env[62923]: DEBUG nova.compute.manager [req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Received event network-changed-35f893d5-3b23-4350-92a9-e3803a075eb0 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 987.231391] env[62923]: DEBUG nova.compute.manager [req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Refreshing instance network info cache due to event network-changed-35f893d5-3b23-4350-92a9-e3803a075eb0. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 987.232060] env[62923]: DEBUG oslo_concurrency.lockutils [req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] Acquiring lock "refresh_cache-b459a438-c287-4fbd-80f5-b5d3c31b83c9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.446262] env[62923]: DEBUG nova.virt.hardware [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 987.446764] env[62923]: DEBUG nova.virt.hardware [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 987.446948] env[62923]: DEBUG nova.virt.hardware [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 987.447163] env[62923]: DEBUG nova.virt.hardware [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 987.447403] env[62923]: DEBUG nova.virt.hardware [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image 
pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 987.447512] env[62923]: DEBUG nova.virt.hardware [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 987.447660] env[62923]: DEBUG nova.virt.hardware [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 987.447840] env[62923]: DEBUG nova.virt.hardware [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 987.448023] env[62923]: DEBUG nova.virt.hardware [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 987.448226] env[62923]: DEBUG nova.virt.hardware [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 987.448405] env[62923]: DEBUG nova.virt.hardware [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 987.449347] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b74172-38cc-47f1-86d1-98a9dea619f6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.458129] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e75cb13-49b8-45e0-9805-c331994f0fb4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.474628] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:0e:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4c7a041-8e34-47f9-8ea1-d2f29414fd9d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd1dcc11-df32-408e-9548-4faf2556a924', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 987.482369] env[62923]: DEBUG oslo.service.loopingcall [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 
tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 987.483575] env[62923]: DEBUG nova.network.neutron [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Updating instance_info_cache with network_info: [{"id": "35f893d5-3b23-4350-92a9-e3803a075eb0", "address": "fa:16:3e:bb:43:f6", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f893d5-3b", "ovs_interfaceid": "35f893d5-3b23-4350-92a9-e3803a075eb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.484747] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 987.484987] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c57a253-d2dd-444e-89fd-f5d32a2d99c2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.509441] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370320, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074066} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.510789] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 987.511090] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 987.511090] env[62923]: value = "task-1370321" [ 987.511090] env[62923]: _type = "Task" [ 987.511090] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.511931] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bdf37c-2ff6-4f9e-9481-b9eb8539adc9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.523070] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370321, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.541094] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f/9f0b13d8-eb25-474c-b9bb-80ee9dd4955f.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 987.541433] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3eed729b-8ec2-499b-95a5-7f22963246de {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.560949] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 987.560949] env[62923]: value = "task-1370322" [ 987.560949] env[62923]: _type = "Task" [ 987.560949] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.571414] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370322, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.735083] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.735083] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.735083] env[62923]: DEBUG nova.network.neutron [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.001140] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "refresh_cache-b459a438-c287-4fbd-80f5-b5d3c31b83c9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.001140] env[62923]: DEBUG nova.compute.manager [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Instance network_info: |[{"id": "35f893d5-3b23-4350-92a9-e3803a075eb0", "address": "fa:16:3e:bb:43:f6", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f893d5-3b", "ovs_interfaceid": "35f893d5-3b23-4350-92a9-e3803a075eb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 988.001582] env[62923]: DEBUG oslo_concurrency.lockutils [req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] Acquired lock "refresh_cache-b459a438-c287-4fbd-80f5-b5d3c31b83c9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.001943] env[62923]: DEBUG nova.network.neutron 
[req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Refreshing network info cache for port 35f893d5-3b23-4350-92a9-e3803a075eb0 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 988.003284] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:43:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e41070eb-3ac1-4ca9-a3d0-fd65893a97de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35f893d5-3b23-4350-92a9-e3803a075eb0', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 988.015717] env[62923]: DEBUG oslo.service.loopingcall [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 988.020033] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 988.026098] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56e759bc-64de-495c-8116-4c178f874707 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.043805] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb7972f-9a26-4ec2-94c0-9552f36fdbd5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.052642] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370321, 'name': CreateVM_Task, 'duration_secs': 0.465784} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.055599] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 988.055974] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 988.055974] env[62923]: value = "task-1370323" [ 988.055974] env[62923]: _type = "Task" [ 988.055974] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.056647] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.056887] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.057312] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 988.058482] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442ea1eb-0c5b-4611-a582-9fac59780442 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.062153] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad48ac43-e7f3-4bfa-b298-e63a5ba4d2c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.074394] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 988.074394] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ab86d8-7bbf-eed7-d964-e1d50ec340d9" [ 988.074394] env[62923]: _type = "Task" [ 988.074394] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.110449] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370323, 'name': CreateVM_Task} progress is 10%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.111140] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370322, 'name': ReconfigVM_Task, 'duration_secs': 0.351155} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.116265] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242c276c-08d3-4097-a65c-8c6dccf87b9d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.118851] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f/9f0b13d8-eb25-474c-b9bb-80ee9dd4955f.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 988.119552] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-672acbdf-96bd-475f-ad33-bb51221b78d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.132023] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ab86d8-7bbf-eed7-d964-e1d50ec340d9, 'name': SearchDatastore_Task, 'duration_secs': 0.01107} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.132023] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.132023] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 988.132023] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.132023] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.132399] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 988.133345] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04535ae1-7a3d-4d3c-bba7-2dea784862c3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.137304] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 988.137304] env[62923]: value = "task-1370324" [ 988.137304] env[62923]: _type = "Task" [ 988.137304] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.137546] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89727b4b-ce44-476a-b768-3ebfb66a0ac2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.152601] env[62923]: DEBUG nova.compute.provider_tree [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.159919] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370324, 'name': Rename_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.161367] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 988.161591] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 988.163013] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08fc7957-44a7-40bc-acbe-b8c895368d51 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.169851] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 988.169851] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5202d1c4-2c9d-adec-dd1e-a5086b11507d" [ 988.169851] env[62923]: _type = "Task" [ 988.169851] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.178459] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5202d1c4-2c9d-adec-dd1e-a5086b11507d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.573662] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370323, 'name': CreateVM_Task, 'duration_secs': 0.330563} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.573997] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 988.574978] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.575191] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.575503] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 988.575775] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80520a38-d392-43e4-b6d2-ed769d57b345 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.581158] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 988.581158] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cffe9e-d278-b3ba-a99c-dd8b42067fc4" [ 988.581158] env[62923]: _type = "Task" [ 988.581158] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.594434] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cffe9e-d278-b3ba-a99c-dd8b42067fc4, 'name': SearchDatastore_Task, 'duration_secs': 0.009819} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.594731] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.594962] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 988.595216] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.595359] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.595541] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 988.595804] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c322fb2-9ffc-4915-b7bd-f8d866fff841 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.608021] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 988.608021] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 988.608251] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56aaa83c-3417-4320-a7c8-1063cc8f9948 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.614219] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 988.614219] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cc7285-8256-10e2-6700-b348dd95b932" [ 988.614219] env[62923]: _type = "Task" [ 988.614219] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.631026] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52cc7285-8256-10e2-6700-b348dd95b932, 'name': SearchDatastore_Task, 'duration_secs': 0.008493} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.631026] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a84ea53-87da-4c61-9eb5-eabed782a283 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.636576] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 988.636576] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ac65db-edd6-05be-7c14-9f1e7cc6590f" [ 988.636576] env[62923]: _type = "Task" [ 988.636576] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.651365] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ac65db-edd6-05be-7c14-9f1e7cc6590f, 'name': SearchDatastore_Task, 'duration_secs': 0.008995} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.652032] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.652032] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] b459a438-c287-4fbd-80f5-b5d3c31b83c9/b459a438-c287-4fbd-80f5-b5d3c31b83c9.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 988.655113] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d7495ad-9dad-45af-9058-d351b6ff3a83 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.657939] env[62923]: DEBUG nova.scheduler.client.report [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 988.660987] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370324, 'name': Rename_Task, 'duration_secs': 0.148805} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.661261] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 988.661902] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b5c54dc-6bbb-4557-aae9-313bdfccdab6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.668829] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 988.668829] env[62923]: value = "task-1370325" [ 988.668829] env[62923]: _type = "Task" [ 988.668829] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.680649] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 988.680649] env[62923]: value = "task-1370326" [ 988.680649] env[62923]: _type = "Task" [ 988.680649] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.695114] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5202d1c4-2c9d-adec-dd1e-a5086b11507d, 'name': SearchDatastore_Task, 'duration_secs': 0.010961} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.695352] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370325, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.696762] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f034f17-8a57-439e-98d0-ef534626e3af {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.702444] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370326, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.705556] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 988.705556] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d70e4c-8efb-9e83-c278-01b346ee8b94" [ 988.705556] env[62923]: _type = "Task" [ 988.705556] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.714188] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d70e4c-8efb-9e83-c278-01b346ee8b94, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.716325] env[62923]: DEBUG nova.network.neutron [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance_info_cache with network_info: [{"id": "990e9014-0a5e-465f-8306-404937c589e0", "address": "fa:16:3e:1f:14:02", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap990e9014-0a", "ovs_interfaceid": "990e9014-0a5e-465f-8306-404937c589e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.774647] env[62923]: DEBUG nova.network.neutron [req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Updated VIF entry in instance network info cache for port 35f893d5-3b23-4350-92a9-e3803a075eb0. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 988.775122] env[62923]: DEBUG nova.network.neutron [req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Updating instance_info_cache with network_info: [{"id": "35f893d5-3b23-4350-92a9-e3803a075eb0", "address": "fa:16:3e:bb:43:f6", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f893d5-3b", "ovs_interfaceid": "35f893d5-3b23-4350-92a9-e3803a075eb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.168887] env[62923]: DEBUG oslo_concurrency.lockutils [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.956s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.171274] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 8.292s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.182743] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370325, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.192777] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370326, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.211086] env[62923]: INFO nova.scheduler.client.report [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Deleted allocations for instance 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7 [ 989.222044] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.223466] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d70e4c-8efb-9e83-c278-01b346ee8b94, 'name': SearchDatastore_Task, 'duration_secs': 0.022891} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.224844] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.225107] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 1b155391-37d9-4186-b70d-84f2dec5af82/1b155391-37d9-4186-b70d-84f2dec5af82.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 989.225667] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8fe77c45-90c6-480e-a1a7-875644f73366 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.234712] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 989.234712] env[62923]: value = "task-1370327" [ 989.234712] env[62923]: _type = "Task" [ 989.234712] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.248144] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370327, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.278787] env[62923]: DEBUG oslo_concurrency.lockutils [req-07e31ef5-71e8-4a5a-bfb7-22440dbd043d req-31a77be4-4a6b-414b-9128-4d9d85d1c766 service nova] Releasing lock "refresh_cache-b459a438-c287-4fbd-80f5-b5d3c31b83c9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.698028] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520725} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.699335] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] b459a438-c287-4fbd-80f5-b5d3c31b83c9/b459a438-c287-4fbd-80f5-b5d3c31b83c9.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 989.699335] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 989.700191] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-581f36e9-5317-4097-a409-3aabf2c12849 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.710560] env[62923]: DEBUG oslo_vmware.api [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370326, 'name': PowerOnVM_Task, 'duration_secs': 0.726405} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.711524] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 989.711767] env[62923]: INFO nova.compute.manager [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Took 9.20 seconds to spawn the instance on the hypervisor. 
[ 989.712053] env[62923]: DEBUG nova.compute.manager [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 989.715125] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3334b344-09a1-40f5-b4fd-31ec9b434ff4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.720259] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 989.720259] env[62923]: value = "task-1370328" [ 989.720259] env[62923]: _type = "Task" [ 989.720259] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.737396] env[62923]: DEBUG oslo_concurrency.lockutils [None req-81609d5c-e2a4-4d44-8962-5823d848e000 tempest-AttachInterfacesTestJSON-343038200 tempest-AttachInterfacesTestJSON-343038200-project-member] Lock "8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 14.586s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.746812] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370328, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.752572] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370327, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.195291] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Applying migration context for instance 92a10f0a-4bfd-405a-956e-3ea29a740b28 as it has an incoming, in-progress migration 73716037-3a40-4904-be0e-5a06f0bc0a28. Migration status is migrating {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 990.197353] env[62923]: INFO nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating resource usage from migration 73716037-3a40-4904-be0e-5a06f0bc0a28 [ 990.221383] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 990.221534] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 43065826-0f2b-48dc-bc42-8e0fd84fdcd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 990.221534] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 534fa654-ed73-4518-bdc7-d1f981628fd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 990.221849] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 990.221849] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 75f9473f-ca67-4bb5-8663-0ce3709885e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 990.221952] env[62923]: WARNING nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance b145b71c-c56b-4872-bb61-fa3e65fef04f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 990.222042] env[62923]: WARNING nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 2a9a93f8-9398-4a19-a149-a1092ceb416d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 990.222166] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 1b155391-37d9-4186-b70d-84f2dec5af82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 990.222278] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 990.222387] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance b459a438-c287-4fbd-80f5-b5d3c31b83c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 990.222515] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Migration 73716037-3a40-4904-be0e-5a06f0bc0a28 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 990.222626] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 92a10f0a-4bfd-405a-956e-3ea29a740b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 990.237181] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370328, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078344} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.240589] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 990.248529] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa01a70-2bdf-4477-acda-c7359f2c1bc3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.259906] env[62923]: INFO nova.compute.manager [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Took 20.40 seconds to build instance. [ 990.268721] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370327, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644509} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.277754] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 1b155391-37d9-4186-b70d-84f2dec5af82/1b155391-37d9-4186-b70d-84f2dec5af82.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 990.278034] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 990.287676] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] b459a438-c287-4fbd-80f5-b5d3c31b83c9/b459a438-c287-4fbd-80f5-b5d3c31b83c9.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 990.288121] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1b3cb2fa-5982-4899-9883-4d535bf80a97 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.291342] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f8fc3c6-457f-47d1-be06-b09bb7a829d9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.313782] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 990.313782] env[62923]: value = "task-1370330" [ 990.313782] env[62923]: _type = "Task" [ 990.313782] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.314194] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 990.314194] env[62923]: value = "task-1370329" [ 990.314194] env[62923]: _type = "Task" [ 990.314194] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.328521] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370329, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.332184] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370330, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.506097] env[62923]: DEBUG oslo_vmware.rw_handles [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f1af8-48ec-ebf1-9b8b-a8d837079dda/disk-0.vmdk. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 990.506777] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b72196-e863-4359-b288-e2668df12e52 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.514012] env[62923]: DEBUG oslo_vmware.rw_handles [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f1af8-48ec-ebf1-9b8b-a8d837079dda/disk-0.vmdk is in state: ready. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 990.514012] env[62923]: ERROR oslo_vmware.rw_handles [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f1af8-48ec-ebf1-9b8b-a8d837079dda/disk-0.vmdk due to incomplete transfer. [ 990.514012] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-60e18205-a4a2-4b8e-81d0-ef3e75b55779 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.520889] env[62923]: DEBUG oslo_vmware.rw_handles [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f1af8-48ec-ebf1-9b8b-a8d837079dda/disk-0.vmdk. 
{{(pid=62923) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 990.521099] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Uploaded image 51961a0b-7b31-4a1e-b2f1-1914040134b5 to the Glance image server {{(pid=62923) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 990.523353] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Destroying the VM {{(pid=62923) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 990.523599] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-18870395-ea8b-4d80-9e4c-9c374e4c2784 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.529556] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 990.529556] env[62923]: value = "task-1370331" [ 990.529556] env[62923]: _type = "Task" [ 990.529556] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.537389] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370331, 'name': Destroy_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.727034] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 990.727372] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 990.727372] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 990.748749] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410ea367-e631-441c-b596-9f7a6c837d42 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.775056] env[62923]: DEBUG oslo_concurrency.lockutils [None req-16cee9c5-56b5-473a-a5a6-a09f15c70216 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 21.925s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.776334] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance '92a10f0a-4bfd-405a-956e-3ea29a740b28' progress to 0 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 990.841552] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370329, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073086} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.841552] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370330, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.842058] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 990.842849] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c42a25-6c5a-4823-8f07-f6d4777f3c9c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.865895] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 1b155391-37d9-4186-b70d-84f2dec5af82/1b155391-37d9-4186-b70d-84f2dec5af82.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 990.869681] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d1cf9f5-04c1-4614-931a-646903d74d93 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.892207] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 990.892207] env[62923]: value = "task-1370332" [ 990.892207] env[62923]: _type = "Task" [ 990.892207] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.898746] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370332, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.043690] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370331, 'name': Destroy_Task, 'duration_secs': 0.307829} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.044030] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Destroyed the VM [ 991.044506] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Deleting Snapshot of the VM instance {{(pid=62923) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 991.048572] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9b40e509-9d1a-4816-9702-d7b6621c1d9f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.051081] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c695175c-7220-43aa-8f4a-f3317db5965a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.057936] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 991.057936] env[62923]: value = "task-1370333" [ 991.057936] env[62923]: _type = "Task" [ 991.057936] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.065872] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f56e973-dfdb-43ee-9d5d-0832802f3f8a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.075772] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370333, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.107009] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de839c95-bb08-44f6-9631-478e3e244390 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.115115] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f751a7-099f-4bea-8de3-7a828caf5312 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.135165] env[62923]: DEBUG nova.compute.provider_tree [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.284340] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 991.285612] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e42ccb8c-4ad1-44a1-89e8-75d54c993ec4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.302275] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 991.302275] env[62923]: value = "task-1370334" [ 991.302275] env[62923]: _type = "Task" [ 991.302275] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.313535] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370334, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.328544] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370330, 'name': ReconfigVM_Task, 'duration_secs': 0.740935} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.328883] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Reconfigured VM instance instance-00000061 to attach disk [datastore1] b459a438-c287-4fbd-80f5-b5d3c31b83c9/b459a438-c287-4fbd-80f5-b5d3c31b83c9.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 991.329600] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb9290b2-d511-47b6-b47c-ad38faee8a3f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.338659] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 991.338659] env[62923]: value = "task-1370335" [ 991.338659] env[62923]: _type = "Task" [ 991.338659] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.347836] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370335, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.408960] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370332, 'name': ReconfigVM_Task, 'duration_secs': 0.30894} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.408960] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 1b155391-37d9-4186-b70d-84f2dec5af82/1b155391-37d9-4186-b70d-84f2dec5af82.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 991.409225] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d5ada946-0bc9-4651-b6a9-1ea1ba424f19 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.418675] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 991.418675] env[62923]: value = "task-1370336" [ 991.418675] env[62923]: _type = "Task" [ 991.418675] env[62923]: } to complete. 
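The "Reconfigured VM instance ... to attach disk ... with type sparse" entry just above is the completion of such a ReconfigVM_Task whose spec adds a VirtualDisk device. A rough sketch of that spec using vSphere SDK type names, not nova's exact vm_util wiring; the client_factory argument and the key/unit values are illustrative assumptions:

    def attach_disk_spec(client_factory, controller_key, vmdk_path, thin=False):
        # Device change: operation 'add' with a VirtualDisk whose backing
        # points at an existing .vmdk, e.g. '[datastore2] <uuid>/<uuid>.vmdk'.
        backing = client_factory.create('ns0:VirtualDiskFlatVer2BackingInfo')
        backing.fileName = vmdk_path
        backing.diskMode = 'persistent'
        backing.thinProvisioned = thin
        disk = client_factory.create('ns0:VirtualDisk')
        disk.backing = backing
        disk.controllerKey = controller_key
        disk.key = -100          # placeholder key, reassigned by vCenter
        disk.unitNumber = 0      # illustrative slot
        change = client_factory.create('ns0:VirtualDeviceConfigSpec')
        change.operation = 'add'
        change.device = disk
        config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
        config_spec.deviceChange = [change]
        return config_spec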
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.429827] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370336, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.569554] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370333, 'name': RemoveSnapshot_Task, 'duration_secs': 0.457878} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.570891] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Deleted Snapshot of the VM instance {{(pid=62923) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 991.571280] env[62923]: DEBUG nova.compute.manager [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 991.572465] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c960dc06-6d22-49b0-8948-726bfac9995d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.637888] env[62923]: DEBUG nova.scheduler.client.report [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 991.815020] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370334, 'name': PowerOffVM_Task, 'duration_secs': 0.30668} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.815020] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.815020] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance '92a10f0a-4bfd-405a-956e-3ea29a740b28' progress to 17 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 991.849824] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370335, 'name': Rename_Task, 'duration_secs': 0.143955} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.849824] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 991.850294] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ee0183e-f732-41fc-b04a-389470917fa3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.856595] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 991.856595] env[62923]: value = "task-1370337" [ 991.856595] env[62923]: _type = "Task" [ 991.856595] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.865720] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370337, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.936443] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370336, 'name': Rename_Task, 'duration_secs': 0.201055} completed successfully. 
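The "progress to 17" update above (and the "progress to 33" that follows later) fit per-step progress reporting over a six-step flow; the step count is inferred from the values, not stated in the log:

    TOTAL_STEPS = 6                              # inferred from the logged values
    for step in (1, 2):
        print(round(step / TOTAL_STEPS * 100))   # -> 17, then 33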
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.936443] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 991.936571] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76637851-1918-468f-b209-b36029722d16 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.943850] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 991.943850] env[62923]: value = "task-1370338" [ 991.943850] env[62923]: _type = "Task" [ 991.943850] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.953994] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370338, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.087531] env[62923]: INFO nova.compute.manager [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Shelve offloading [ 992.089788] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 992.090242] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1abf2bb1-125d-46c5-891c-e0a8a816f7c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.098813] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 992.098813] env[62923]: value = "task-1370339" [ 992.098813] env[62923]: _type = "Task" [ 992.098813] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.113944] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] VM already powered off {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 992.114313] env[62923]: DEBUG nova.compute.manager [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 992.115501] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed9f39b-92e0-4cf9-9026-4adae3f0971a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.124469] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.124839] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.125065] env[62923]: DEBUG nova.network.neutron [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 992.146514] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 992.146514] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.974s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.146514] env[62923]: DEBUG oslo_concurrency.lockutils [None req-57708c58-55c8-43f7-88ff-b52c02c80b8f tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.146514] env[62923]: DEBUG oslo_concurrency.lockutils [None req-57708c58-55c8-43f7-88ff-b52c02c80b8f tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f"
acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.146514] env[62923]: DEBUG nova.compute.manager [None req-57708c58-55c8-43f7-88ff-b52c02c80b8f tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 992.146943] env[62923]: DEBUG oslo_concurrency.lockutils [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.946s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.147177] env[62923]: DEBUG oslo_concurrency.lockutils [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.149905] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.606s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.152587] env[62923]: INFO nova.compute.claims [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 992.157481] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c68d49b-a67f-4b58-a626-127112293099 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.169671] env[62923]: DEBUG nova.compute.manager [None req-57708c58-55c8-43f7-88ff-b52c02c80b8f tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62923) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 992.170228] env[62923]: DEBUG nova.objects.instance [None req-57708c58-55c8-43f7-88ff-b52c02c80b8f tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lazy-loading 'flavor' on Instance uuid 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.189743] env[62923]: INFO nova.scheduler.client.report [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Deleted allocations for instance b145b71c-c56b-4872-bb61-fa3e65fef04f [ 992.319578] env[62923]: DEBUG nova.virt.hardware [None
req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 992.319836] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 992.319991] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 992.320223] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 992.320390] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 992.320555] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 992.321095] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 992.321095] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 992.321292] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 992.321351] env[62923]: DEBUG nova.virt.hardware [None 
req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 992.321529] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 992.328146] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9f400f6-2ffd-4570-b7a1-095d84d6361c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.342359] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Acquiring lock "63b16034-87f0-433f-b48c-0e936642534c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.342689] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lock "63b16034-87f0-433f-b48c-0e936642534c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.349492] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 992.349492] env[62923]: value = "task-1370340" [ 992.349492] env[62923]: _type = "Task" [ 992.349492] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.359307] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370340, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.369257] env[62923]: DEBUG oslo_vmware.api [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370337, 'name': PowerOnVM_Task, 'duration_secs': 0.481604} completed successfully.
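The topology search logged above ("Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies") can be re-derived with a toy enumeration: keep every (sockets, cores, threads) triple whose product equals the vCPU count and which stays within the logged 65536 limits:

    import itertools

    def possible_topologies(vcpus, limits=(65536, 65536, 65536)):
        for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
            if s * c * t == vcpus and all(v <= m for v, m in zip((s, c, t), limits)):
                yield (s, c, t)

    print(list(possible_topologies(1)))   # [(1, 1, 1)] -> "Got 1 possible topologies"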
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.369654] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 992.369908] env[62923]: INFO nova.compute.manager [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Took 7.27 seconds to spawn the instance on the hypervisor. [ 992.370164] env[62923]: DEBUG nova.compute.manager [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 992.371131] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3713d8bd-1817-4953-8bfe-7b02ed5625ca {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.457056] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370338, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.675715] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-57708c58-55c8-43f7-88ff-b52c02c80b8f tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 992.676097] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e823617c-0431-461e-acbd-bfb7ae48039f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.687303] env[62923]: DEBUG oslo_vmware.api [None req-57708c58-55c8-43f7-88ff-b52c02c80b8f tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 992.687303] env[62923]: value = "task-1370341" [ 992.687303] env[62923]: _type = "Task" [ 992.687303] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.707780] env[62923]: DEBUG oslo_vmware.api [None req-57708c58-55c8-43f7-88ff-b52c02c80b8f tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370341, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.708469] env[62923]: DEBUG oslo_concurrency.lockutils [None req-adf93b67-e06a-4107-8bbd-604368e52441 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "b145b71c-c56b-4872-bb61-fa3e65fef04f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 15.356s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.850223] env[62923]: DEBUG nova.compute.manager [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 992.871441] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370340, 'name': ReconfigVM_Task, 'duration_secs': 0.308388} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.871441] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance '92a10f0a-4bfd-405a-956e-3ea29a740b28' progress to 33 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 992.897639] env[62923]: INFO nova.compute.manager [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Took 15.77 seconds to build instance.
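All of the Acquiring/acquired/"released" lock lines with their waited/held timings come from oslo.concurrency's lock wrapper. A minimal sketch of the decorator pattern behind them, using an in-process lock; the function name and body are illustrative:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs with the named lock held; the wrapper logs
        # 'acquired ... :: waited Ns' on entry and
        # '"released" ... :: held Ns' on exit.
        pass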
[ 992.941706] env[62923]: DEBUG nova.network.neutron [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updating instance_info_cache with network_info: [{"id": "545dfb40-7ae4-4d69-86f8-0d334ced67ff", "address": "fa:16:3e:18:f5:c7", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap545dfb40-7a", "ovs_interfaceid": "545dfb40-7ae4-4d69-86f8-0d334ced67ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.958963] env[62923]: DEBUG oslo_vmware.api [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370338, 'name': PowerOnVM_Task, 'duration_secs': 0.5627} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.959370] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 992.959596] env[62923]: DEBUG nova.compute.manager [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 992.960507] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c835d97-ae21-485f-a036-80cbd35192c3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.204242] env[62923]: DEBUG oslo_vmware.api [None req-57708c58-55c8-43f7-88ff-b52c02c80b8f tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370341, 'name': PowerOffVM_Task, 'duration_secs': 0.401246} completed successfully. 
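The instance_info_cache update above stores the instance's VIFs as a JSON list. For reference, extracting the fixed and floating addresses from a payload shaped like that entry (a trimmed, hypothetical copy of the same structure):

    vif = {
        "id": "545dfb40-7ae4-4d69-86f8-0d334ced67ff",
        "address": "fa:16:3e:18:f5:c7",
        "network": {"subnets": [{"cidr": "192.168.128.0/28",
                                 "ips": [{"address": "192.168.128.3",
                                          "floating_ips": [{"address": "10.180.180.243"}]}]}]},
    }
    fixed = [ip["address"] for sn in vif["network"]["subnets"] for ip in sn["ips"]]
    floating = [f["address"] for sn in vif["network"]["subnets"]
                for ip in sn["ips"] for f in ip.get("floating_ips", [])]
    print(fixed, floating)   # ['192.168.128.3'] ['10.180.180.243']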
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.204989] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-57708c58-55c8-43f7-88ff-b52c02c80b8f tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 993.205238] env[62923]: DEBUG nova.compute.manager [None req-57708c58-55c8-43f7-88ff-b52c02c80b8f tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 993.206127] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7093865e-3411-4373-ace6-b92f1fbdb340 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.374635] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.379693] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 993.379693] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 993.379693] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 993.379862] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 993.379957] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 993.380120] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 993.380326] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 993.381786] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 993.381786] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 993.381786] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 993.381786] env[62923]: DEBUG nova.virt.hardware [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 993.386828] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Reconfiguring VM instance instance-0000005e to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 993.390658] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78d0330d-31a6-4b06-a79b-a6c6947934c3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.408374] env[62923]: DEBUG oslo_concurrency.lockutils [None req-566923b6-1297-4cda-a86f-839ca61f5a82 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 17.291s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.414891] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [
993.414891] env[62923]: value = "task-1370342" [ 993.414891] env[62923]: _type = "Task" [ 993.414891] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.426770] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370342, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.441266] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb3816c-e7f2-4837-8dd2-d844315780fc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.444835] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.451861] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89e90b8-2cad-4ee1-8304-ad203cb28f66 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.496818] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1898594b-df0b-4f0a-853a-ffa9ae360938 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.500044] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.506930] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019de718-c888-436b-98e6-3bdab84f450b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.520448] env[62923]: DEBUG nova.compute.provider_tree [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.722288] env[62923]: DEBUG oslo_concurrency.lockutils [None req-57708c58-55c8-43f7-88ff-b52c02c80b8f tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.576s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.811056] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock 
"43065826-0f2b-48dc-bc42-8e0fd84fdcd3" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.811366] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.934841] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370342, 'name': ReconfigVM_Task, 'duration_secs': 0.200887} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.936152] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Reconfigured VM instance instance-0000005e to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 993.939249] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091de2cd-16bd-4ca9-b518-1a588d2ac883 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.964306] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 92a10f0a-4bfd-405a-956e-3ea29a740b28/92a10f0a-4bfd-405a-956e-3ea29a740b28.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.965914] env[62923]: DEBUG nova.compute.manager [req-348bd1cd-25d4-46dd-bbfc-15c19d155cef req-5060c09d-5393-44d1-a860-f80eb50b68c2 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Received event network-vif-unplugged-545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 993.966158] env[62923]: DEBUG oslo_concurrency.lockutils [req-348bd1cd-25d4-46dd-bbfc-15c19d155cef req-5060c09d-5393-44d1-a860-f80eb50b68c2 service nova] Acquiring lock "75f9473f-ca67-4bb5-8663-0ce3709885e9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.966377] env[62923]: DEBUG oslo_concurrency.lockutils [req-348bd1cd-25d4-46dd-bbfc-15c19d155cef req-5060c09d-5393-44d1-a860-f80eb50b68c2 service nova] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.966558] env[62923]: DEBUG oslo_concurrency.lockutils [req-348bd1cd-25d4-46dd-bbfc-15c19d155cef 
req-5060c09d-5393-44d1-a860-f80eb50b68c2 service nova] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.966709] env[62923]: DEBUG nova.compute.manager [req-348bd1cd-25d4-46dd-bbfc-15c19d155cef req-5060c09d-5393-44d1-a860-f80eb50b68c2 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] No waiting events found dispatching network-vif-unplugged-545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 993.966874] env[62923]: WARNING nova.compute.manager [req-348bd1cd-25d4-46dd-bbfc-15c19d155cef req-5060c09d-5393-44d1-a860-f80eb50b68c2 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Received unexpected event network-vif-unplugged-545dfb40-7ae4-4d69-86f8-0d334ced67ff for instance with vm_state shelved and task_state shelving_offloading. [ 993.967461] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e077440a-5faa-4f15-9a39-b154566f18f5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.986936] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 993.986936] env[62923]: value = "task-1370343" [ 993.986936] env[62923]: _type = "Task" [ 993.986936] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.996535] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370343, 'name': ReconfigVM_Task} progress is 6%.
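The "No waiting events found dispatching network-vif-unplugged-..." line above, followed by the WARNING about an unexpected event, reflects nova's external-event handshake: waiters register per (instance, event) key, and a dispatched event with no registered waiter is logged and dropped. A toy version of that handshake, with threading.Event standing in for nova's eventlet-based machinery:

    import threading

    _waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(instance_uuid, event_name):
        ev = threading.Event()
        _waiters[(instance_uuid, event_name)] = ev
        return ev   # the caller later ev.wait()s for the Neutron callback

    def dispatch_event(instance_uuid, event_name):
        ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print('No waiting events found dispatching', event_name)
        else:
            ev.set()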
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.028118] env[62923]: DEBUG nova.scheduler.client.report [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 994.085271] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 994.086244] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1677fc80-6f6a-471e-aed7-630bada194c2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.093650] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 994.094016] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-818721d2-14e6-48f0-b05d-05365d68613e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.161395] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 994.161789] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 994.162589] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleting the datastore file [datastore2] 75f9473f-ca67-4bb5-8663-0ce3709885e9 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 994.162589] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac312909-3d60-46d2-a065-8fef014280e3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.170010] env[62923]: DEBUG 
oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 994.170010] env[62923]: value = "task-1370345" [ 994.170010] env[62923]: _type = "Task" [ 994.170010] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.179572] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370345, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.314069] env[62923]: INFO nova.compute.manager [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Detaching volume 47f39439-20e2-4392-8f87-83a201ccb62d [ 994.351257] env[62923]: INFO nova.virt.block_device [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Attempting to driver detach volume 47f39439-20e2-4392-8f87-83a201ccb62d from mountpoint /dev/sdb [ 994.351506] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Volume detach. Driver type: vmdk {{(pid=62923) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 994.351694] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291490', 'volume_id': '47f39439-20e2-4392-8f87-83a201ccb62d', 'name': 'volume-47f39439-20e2-4392-8f87-83a201ccb62d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '43065826-0f2b-48dc-bc42-8e0fd84fdcd3', 'attached_at': '', 'detached_at': '', 'volume_id': '47f39439-20e2-4392-8f87-83a201ccb62d', 'serial': '47f39439-20e2-4392-8f87-83a201ccb62d'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 994.352589] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4d4e83-0649-467f-9d8d-e3f5715670c9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.374446] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30c534b-aa59-45d4-9cfe-dfb1cb50e9a5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.381744] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a32115-4586-46a2-9d29-0335d7b9ba87 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.402827] env[62923]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f12976b-e97d-40bd-b943-794f8ee9ef8a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.418497] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] The volume has not been displaced from its original location: [datastore1] volume-47f39439-20e2-4392-8f87-83a201ccb62d/volume-47f39439-20e2-4392-8f87-83a201ccb62d.vmdk. No consolidation needed. {{(pid=62923) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 994.423351] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Reconfiguring VM instance instance-00000049 to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 994.423994] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e51521e0-aab3-44c3-bce7-d706b9e64f31 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.442673] env[62923]: DEBUG oslo_vmware.api [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 994.442673] env[62923]: value = "task-1370346" [ 994.442673] env[62923]: _type = "Task" [ 994.442673] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.443112] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "1b155391-37d9-4186-b70d-84f2dec5af82" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.443345] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1b155391-37d9-4186-b70d-84f2dec5af82" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.443544] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "1b155391-37d9-4186-b70d-84f2dec5af82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.443725] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1b155391-37d9-4186-b70d-84f2dec5af82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.443892] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1b155391-37d9-4186-b70d-84f2dec5af82-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.449592] env[62923]: INFO nova.compute.manager [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Terminating instance [ 994.456473] env[62923]: DEBUG oslo_vmware.api [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370346, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.457081] env[62923]: DEBUG nova.compute.manager [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 994.457281] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 994.458101] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774b33f6-be88-4f56-96b2-0751d7fed2ac {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.464607] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 994.464863] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10e1f98e-dede-4a11-9c6f-dc78234bca78 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.469967] env[62923]: DEBUG oslo_vmware.api [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 994.469967] env[62923]: value = "task-1370347" [ 994.469967] env[62923]: _type = "Task" [ 994.469967] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.477968] env[62923]: DEBUG oslo_vmware.api [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370347, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.495204] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370343, 'name': ReconfigVM_Task, 'duration_secs': 0.271477} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.495516] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 92a10f0a-4bfd-405a-956e-3ea29a740b28/92a10f0a-4bfd-405a-956e-3ea29a740b28.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.495841] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance '92a10f0a-4bfd-405a-956e-3ea29a740b28' progress to 50 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 994.533402] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.383s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.537160] env[62923]: DEBUG nova.compute.manager [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 994.537160] env[62923]: DEBUG oslo_concurrency.lockutils [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.511s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.537160] env[62923]: DEBUG oslo_concurrency.lockutils [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.540433] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.168s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.541979] env[62923]: INFO nova.compute.claims [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 994.572726] env[62923]: INFO nova.scheduler.client.report [None 
req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Deleted allocations for instance 2a9a93f8-9398-4a19-a149-a1092ceb416d [ 994.679519] env[62923]: DEBUG oslo_vmware.api [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370345, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155346} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.679796] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 994.679985] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 994.680568] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 994.697103] env[62923]: INFO nova.scheduler.client.report [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleted allocations for instance 75f9473f-ca67-4bb5-8663-0ce3709885e9 [ 994.845979] env[62923]: DEBUG nova.compute.manager [req-57db4c8e-a975-4d35-ad20-3243fbb89fb5 req-14d64f8b-14f5-4e43-a0aa-4650ce082a91 service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Received event network-changed-35f893d5-3b23-4350-92a9-e3803a075eb0 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 994.846389] env[62923]: DEBUG nova.compute.manager [req-57db4c8e-a975-4d35-ad20-3243fbb89fb5 req-14d64f8b-14f5-4e43-a0aa-4650ce082a91 service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Refreshing instance network info cache due to event network-changed-35f893d5-3b23-4350-92a9-e3803a075eb0. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 994.847856] env[62923]: DEBUG oslo_concurrency.lockutils [req-57db4c8e-a975-4d35-ad20-3243fbb89fb5 req-14d64f8b-14f5-4e43-a0aa-4650ce082a91 service nova] Acquiring lock "refresh_cache-b459a438-c287-4fbd-80f5-b5d3c31b83c9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.847856] env[62923]: DEBUG oslo_concurrency.lockutils [req-57db4c8e-a975-4d35-ad20-3243fbb89fb5 req-14d64f8b-14f5-4e43-a0aa-4650ce082a91 service nova] Acquired lock "refresh_cache-b459a438-c287-4fbd-80f5-b5d3c31b83c9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.847856] env[62923]: DEBUG nova.network.neutron [req-57db4c8e-a975-4d35-ad20-3243fbb89fb5 req-14d64f8b-14f5-4e43-a0aa-4650ce082a91 service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Refreshing network info cache for port 35f893d5-3b23-4350-92a9-e3803a075eb0 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 994.955267] env[62923]: DEBUG oslo_vmware.api [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370346, 'name': ReconfigVM_Task, 'duration_secs': 0.272595} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.955512] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Reconfigured VM instance instance-00000049 to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 994.961382] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a55924ff-9b7c-4f33-9691-d12397641d08 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.987565] env[62923]: DEBUG oslo_vmware.api [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 994.987565] env[62923]: value = "task-1370348" [ 994.987565] env[62923]: _type = "Task" [ 994.987565] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.987845] env[62923]: DEBUG oslo_vmware.api [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370347, 'name': PowerOffVM_Task, 'duration_secs': 0.197207} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.988229] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 994.988395] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 994.991618] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b156f9da-314b-4f4d-ac76-6eed5ec7c703 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.998706] env[62923]: DEBUG oslo_vmware.api [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370348, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.005285] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c71d09-827d-4245-8c95-ce8eef57138e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.025753] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8634780e-827f-4c33-bc25-dd14e79c1abe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.049574] env[62923]: DEBUG nova.compute.utils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 995.053161] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance '92a10f0a-4bfd-405a-956e-3ea29a740b28' progress to 67 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 995.058302] env[62923]: DEBUG nova.compute.manager [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 995.059132] env[62923]: DEBUG nova.network.neutron [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 995.062929] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.062929] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.063203] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleting the datastore file [datastore2] 1b155391-37d9-4186-b70d-84f2dec5af82 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.063566] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de317db7-b94d-4a99-aa50-2399827f050e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.070913] env[62923]: DEBUG oslo_vmware.api [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 995.070913] env[62923]: value = "task-1370350" [ 995.070913] env[62923]: _type = "Task" [ 995.070913] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.084572] env[62923]: DEBUG oslo_vmware.api [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370350, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.085069] env[62923]: DEBUG oslo_concurrency.lockutils [None req-317e102c-1d63-460d-b98d-01ee1ea06a12 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "2a9a93f8-9398-4a19-a149-a1092ceb416d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.029s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.111552] env[62923]: DEBUG nova.policy [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '68e62d519b19448c8cac7f1b2e55a087', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3b09245b63144e9bbcb2262aef33a21', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 995.204045] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.386625] env[62923]: DEBUG nova.network.neutron [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Successfully created port: 30f8b729-5714-40d3-8c21-f0662d7104c5 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 995.410211] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.410501] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.410869] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.410966] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 
tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.411091] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.413051] env[62923]: INFO nova.compute.manager [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Terminating instance [ 995.414963] env[62923]: DEBUG nova.compute.manager [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 995.415041] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 995.415914] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada121fd-44aa-47db-a4e5-afff4e8ea0b0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.426346] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.426565] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9005b429-bd53-4a65-86db-bb740fea920e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.484950] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.485192] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.485375] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleting the datastore file 
[datastore1] 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.485675] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-efe74922-9560-47a4-8c69-f3a43382ccba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.493556] env[62923]: DEBUG oslo_vmware.api [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for the task: (returnval){ [ 995.493556] env[62923]: value = "task-1370352" [ 995.493556] env[62923]: _type = "Task" [ 995.493556] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.500679] env[62923]: DEBUG oslo_vmware.api [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370348, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.505263] env[62923]: DEBUG oslo_vmware.api [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370352, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.525278] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Acquiring lock "d1026124-821b-44c1-b1f6-257597ce1195" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.525542] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Lock "d1026124-821b-44c1-b1f6-257597ce1195" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.558977] env[62923]: DEBUG nova.compute.manager [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 995.583289] env[62923]: DEBUG oslo_vmware.api [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370350, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194074} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.584321] env[62923]: DEBUG nova.network.neutron [req-57db4c8e-a975-4d35-ad20-3243fbb89fb5 req-14d64f8b-14f5-4e43-a0aa-4650ce082a91 service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Updated VIF entry in instance network info cache for port 35f893d5-3b23-4350-92a9-e3803a075eb0. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 995.584616] env[62923]: DEBUG nova.network.neutron [req-57db4c8e-a975-4d35-ad20-3243fbb89fb5 req-14d64f8b-14f5-4e43-a0aa-4650ce082a91 service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Updating instance_info_cache with network_info: [{"id": "35f893d5-3b23-4350-92a9-e3803a075eb0", "address": "fa:16:3e:bb:43:f6", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35f893d5-3b", "ovs_interfaceid": "35f893d5-3b23-4350-92a9-e3803a075eb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.585951] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 995.586193] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 995.586415] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 995.586600] env[62923]: INFO nova.compute.manager [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Took 1.13 seconds to destroy the instance on the hypervisor. 
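Editor's note: the DeleteDatastoreFile_Task, ReconfigVM_Task and PowerOffVM_Task records above all follow the same shape: invoke a vCenter task, then poll it ("progress is N%") until it reports success or error. A minimal sketch of that polling loop in Python, assuming a hypothetical fetch_task_state(task_id) callable standing in for the PropertyCollector lookups oslo.vmware actually performs; this is an illustration of the pattern, not oslo.vmware's implementation.

    import time

    def wait_for_task(fetch_task_state, task_id, interval=0.5, timeout=60.0):
        # Poll until the task reaches a terminal state, mirroring the
        # "progress is N%" / "completed successfully" records above.
        # fetch_task_state is a hypothetical callable returning a
        # (state, detail) tuple.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, detail = fetch_task_state(task_id)
            if state == "success":
                return detail  # e.g. {'duration_secs': 0.155346}
            if state == "error":
                raise RuntimeError(f"task {task_id} failed: {detail}")
            time.sleep(interval)  # 'queued'/'running': wait and re-poll
        raise TimeoutError(f"task {task_id} did not complete within {timeout}s")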
[ 995.586881] env[62923]: DEBUG oslo.service.loopingcall [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 995.590050] env[62923]: DEBUG nova.compute.manager [-] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 995.590169] env[62923]: DEBUG nova.network.neutron [-] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 995.617322] env[62923]: DEBUG nova.network.neutron [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Port 990e9014-0a5e-465f-8306-404937c589e0 binding to destination host cpu-1 is already ACTIVE {{(pid=62923) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 995.814801] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3182a7-d09e-42e4-b3ba-01b0824b4d31 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.822622] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56783ab2-0dac-408d-a8a6-385a259f9f7e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.866093] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2053a756-b241-4e4c-b34a-7d21518853de {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.877253] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74dd9a0-aa63-4c6e-bfe2-156dffd344f5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.893946] env[62923]: DEBUG nova.compute.provider_tree [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 995.983806] env[62923]: DEBUG nova.compute.manager [req-58827e59-f681-47a8-80df-fb8cd8ac61ff req-492249ff-3d5b-4bbf-a007-7d558cfcb54c service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Received event network-changed-545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 995.984269] env[62923]: DEBUG nova.compute.manager 
[req-58827e59-f681-47a8-80df-fb8cd8ac61ff req-492249ff-3d5b-4bbf-a007-7d558cfcb54c service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Refreshing instance network info cache due to event network-changed-545dfb40-7ae4-4d69-86f8-0d334ced67ff. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 995.984343] env[62923]: DEBUG oslo_concurrency.lockutils [req-58827e59-f681-47a8-80df-fb8cd8ac61ff req-492249ff-3d5b-4bbf-a007-7d558cfcb54c service nova] Acquiring lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.984466] env[62923]: DEBUG oslo_concurrency.lockutils [req-58827e59-f681-47a8-80df-fb8cd8ac61ff req-492249ff-3d5b-4bbf-a007-7d558cfcb54c service nova] Acquired lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.984625] env[62923]: DEBUG nova.network.neutron [req-58827e59-f681-47a8-80df-fb8cd8ac61ff req-492249ff-3d5b-4bbf-a007-7d558cfcb54c service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Refreshing network info cache for port 545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 996.000634] env[62923]: DEBUG oslo_vmware.api [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370348, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.006315] env[62923]: DEBUG oslo_vmware.api [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Task: {'id': task-1370352, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128239} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.006419] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 996.006564] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 996.006743] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 996.006917] env[62923]: INFO nova.compute.manager [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Took 0.59 seconds to destroy the instance on the hypervisor. 
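Editor's note: the lock records around terminate_instance ('Acquiring lock "<uuid>" by "...do_terminate_instance"', 'acquired ... waited 0.000s', '"released" ... held 0.000s') are emitted by oslo.concurrency's synchronized wrapper. A minimal sketch of the pattern with a simplified, hypothetical ComputeManager — the real Nova code differs in detail, but the per-instance-UUID named lock is the same idea:

    from oslo_concurrency import lockutils

    class ComputeManager:
        def terminate_instance(self, instance_uuid):
            # The decorator's wrapper logs the Acquiring/acquired/released
            # lines seen above, including how long the caller waited and
            # how long the lock was held. Concurrent operations on the
            # same instance UUID queue up behind this lock.
            @lockutils.synchronized(instance_uuid)
            def do_terminate_instance():
                pass  # power off, unregister, delete files, deallocate network
            do_terminate_instance()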
[ 996.007255] env[62923]: DEBUG oslo.service.loopingcall [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 996.008191] env[62923]: DEBUG nova.compute.manager [-] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 996.008191] env[62923]: DEBUG nova.network.neutron [-] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 996.027735] env[62923]: DEBUG nova.compute.manager [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 996.090878] env[62923]: DEBUG oslo_concurrency.lockutils [req-57db4c8e-a975-4d35-ad20-3243fbb89fb5 req-14d64f8b-14f5-4e43-a0aa-4650ce082a91 service nova] Releasing lock "refresh_cache-b459a438-c287-4fbd-80f5-b5d3c31b83c9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.372259] env[62923]: DEBUG nova.network.neutron [-] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.420550] env[62923]: ERROR nova.scheduler.client.report [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [req-beea2dc2-dd6e-4118-94f4-9b1fbf468880] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-beea2dc2-dd6e-4118-94f4-9b1fbf468880"}]} [ 996.436489] env[62923]: DEBUG nova.scheduler.client.report [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 996.450307] env[62923]: DEBUG nova.scheduler.client.report [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 996.450540] env[62923]: DEBUG nova.compute.provider_tree [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 996.461701] env[62923]: DEBUG nova.scheduler.client.report [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 996.479617] env[62923]: DEBUG nova.scheduler.client.report [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 996.501729] env[62923]: DEBUG oslo_vmware.api [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370348, 'name': ReconfigVM_Task, 'duration_secs': 1.164406} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.502111] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291490', 'volume_id': '47f39439-20e2-4392-8f87-83a201ccb62d', 'name': 'volume-47f39439-20e2-4392-8f87-83a201ccb62d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '43065826-0f2b-48dc-bc42-8e0fd84fdcd3', 'attached_at': '', 'detached_at': '', 'volume_id': '47f39439-20e2-4392-8f87-83a201ccb62d', 'serial': '47f39439-20e2-4392-8f87-83a201ccb62d'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 996.551491] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.570605] env[62923]: DEBUG nova.compute.manager [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 996.591760] env[62923]: DEBUG nova.virt.hardware [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 996.592023] env[62923]: DEBUG nova.virt.hardware [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 996.592186] env[62923]: DEBUG nova.virt.hardware [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 996.592373] env[62923]: DEBUG nova.virt.hardware [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 
tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 996.592524] env[62923]: DEBUG nova.virt.hardware [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 996.592673] env[62923]: DEBUG nova.virt.hardware [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 996.593139] env[62923]: DEBUG nova.virt.hardware [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 996.593139] env[62923]: DEBUG nova.virt.hardware [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 996.593241] env[62923]: DEBUG nova.virt.hardware [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 996.593366] env[62923]: DEBUG nova.virt.hardware [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 996.593538] env[62923]: DEBUG nova.virt.hardware [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 996.594457] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103c0bec-58de-4234-919d-503cceddec18 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.606340] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00a5fbe-f832-4234-a026-e5fb5698a36f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.636498] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fd14bc-5286-4200-a47a-e59e67093ee7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.647571] env[62923]: DEBUG oslo_concurrency.lockutils [None 
req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "92a10f0a-4bfd-405a-956e-3ea29a740b28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.647793] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "92a10f0a-4bfd-405a-956e-3ea29a740b28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.647967] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "92a10f0a-4bfd-405a-956e-3ea29a740b28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.653714] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8402ec1-f308-4cc4-acd8-942b020ddfa3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.687354] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8058a5e9-e9fd-4557-b390-ed411ecdb2cf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.695216] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ddf18f-d737-4f2e-81e8-873e2c7da56a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.709402] env[62923]: DEBUG nova.compute.provider_tree [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 996.725770] env[62923]: DEBUG nova.network.neutron [req-58827e59-f681-47a8-80df-fb8cd8ac61ff req-492249ff-3d5b-4bbf-a007-7d558cfcb54c service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updated VIF entry in instance network info cache for port 545dfb40-7ae4-4d69-86f8-0d334ced67ff. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 996.726168] env[62923]: DEBUG nova.network.neutron [req-58827e59-f681-47a8-80df-fb8cd8ac61ff req-492249ff-3d5b-4bbf-a007-7d558cfcb54c service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updating instance_info_cache with network_info: [{"id": "545dfb40-7ae4-4d69-86f8-0d334ced67ff", "address": "fa:16:3e:18:f5:c7", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap545dfb40-7a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.850495] env[62923]: DEBUG nova.network.neutron [-] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.876602] env[62923]: INFO nova.compute.manager [-] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Took 1.29 seconds to deallocate network for instance. 
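The "92a10f0a-...-events" Acquiring/acquired/released triplet above is oslo.concurrency's synchronized decorator at work: Nova wraps an inner function so the lock name can carry the instance UUID, which is why the trace shows "clear_events_for_instance.._clear_events". A minimal sketch of that pattern, assuming only the stock lockutils.synchronized decorator (names are illustrative, not Nova's literal code):

    from oslo_concurrency import lockutils

    def clear_events_for_instance(instance_uuid):
        # The per-instance lock name ("<uuid>-events") matches the
        # lock strings in this trace; the DEBUG lines at
        # lockutils.py:402/407/421 bracket the decorated call.
        @lockutils.synchronized(instance_uuid + '-events')
        def _clear_events():
            # Drop any queued external events for this instance.
            return {}

        return _clear_events()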
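The ProviderTree inventory update logged just above is guarded by a resource-provider generation; a few entries below, the same update fails with 409 "placement.concurrent_update" and only succeeds after a refresh, with the provider generation moving from 125 to 126. A hedged sketch of that read-refresh-retry loop; the client and its methods (get_provider_generation, put_inventory) are hypothetical stand-ins for Nova's scheduler report client, not a real API:

    import time

    def set_inventory_with_retry(client, rp_uuid, inventory, attempts=3):
        for _ in range(attempts):
            # Re-read the provider so the PUT carries the current
            # generation (hypothetical client methods).
            generation = client.get_provider_generation(rp_uuid)
            resp = client.put_inventory(rp_uuid, inventory, generation)
            if resp.status_code != 409:
                return resp
            # Another writer bumped the generation, exactly as in
            # this trace; back off briefly and retry with fresh data.
            time.sleep(0.1)
        raise RuntimeError('placement concurrent_update retries exhausted')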
[ 996.878012] env[62923]: DEBUG nova.compute.manager [req-c3a30a4a-556c-4f8d-a04a-e680ef83e065 req-af9f910b-4f82-4a3d-bfe4-d2cc75bd477d service nova] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Received event network-vif-deleted-fd1dcc11-df32-408e-9548-4faf2556a924 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 996.878262] env[62923]: DEBUG nova.compute.manager [req-c3a30a4a-556c-4f8d-a04a-e680ef83e065 req-af9f910b-4f82-4a3d-bfe4-d2cc75bd477d service nova] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Received event network-vif-deleted-032aec56-6f4e-4f4c-8c78-8810c6ce2b07 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 997.574441] env[62923]: DEBUG nova.network.neutron [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Successfully updated port: 30f8b729-5714-40d3-8c21-f0662d7104c5 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 997.576839] env[62923]: DEBUG nova.objects.instance [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lazy-loading 'flavor' on Instance uuid 43065826-0f2b-48dc-bc42-8e0fd84fdcd3 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.578556] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.582416] env[62923]: DEBUG oslo_concurrency.lockutils [req-58827e59-f681-47a8-80df-fb8cd8ac61ff req-492249ff-3d5b-4bbf-a007-7d558cfcb54c service nova] Releasing lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.582810] env[62923]: INFO nova.compute.manager [-] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Took 1.58 seconds to deallocate network for instance. [ 997.607973] env[62923]: ERROR nova.scheduler.client.report [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [req-c8c0f15e-b5c5-44cc-9329-161a63603f13] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c8c0f15e-b5c5-44cc-9329-161a63603f13"}]} [ 997.625764] env[62923]: DEBUG nova.scheduler.client.report [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 997.646830] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.647060] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.647251] env[62923]: DEBUG nova.network.neutron [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.649824] env[62923]: DEBUG nova.scheduler.client.report [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 997.650091] env[62923]: DEBUG nova.compute.provider_tree [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 997.660639] env[62923]: DEBUG nova.scheduler.client.report [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Refreshing aggregate associations for 
resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 997.677391] env[62923]: DEBUG nova.scheduler.client.report [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 997.811952] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e79a20-51b5-452f-ad76-9419239dd982 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.819516] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62df11b0-48d3-48b2-af90-e1da47d73128 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.849187] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ed092b-f293-4b4b-bb3f-b33e642e93f0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.856486] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99cb401-13c9-497c-b8a1-eeb89776c8bc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.869485] env[62923]: DEBUG nova.compute.provider_tree [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 998.010310] env[62923]: DEBUG nova.compute.manager [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Received event network-vif-plugged-30f8b729-5714-40d3-8c21-f0662d7104c5 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 998.010523] env[62923]: DEBUG oslo_concurrency.lockutils [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] Acquiring lock "cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.010726] env[62923]: DEBUG oslo_concurrency.lockutils [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] Lock "cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.010892] env[62923]: DEBUG oslo_concurrency.lockutils [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] Lock "cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.011066] env[62923]: DEBUG nova.compute.manager [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] No waiting events found dispatching network-vif-plugged-30f8b729-5714-40d3-8c21-f0662d7104c5 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 998.011232] env[62923]: WARNING nova.compute.manager [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Received unexpected event network-vif-plugged-30f8b729-5714-40d3-8c21-f0662d7104c5 for instance with vm_state building and task_state spawning. [ 998.011389] env[62923]: DEBUG nova.compute.manager [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Received event network-changed-30f8b729-5714-40d3-8c21-f0662d7104c5 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 998.011540] env[62923]: DEBUG nova.compute.manager [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Refreshing instance network info cache due to event network-changed-30f8b729-5714-40d3-8c21-f0662d7104c5. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 998.011715] env[62923]: DEBUG oslo_concurrency.lockutils [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] Acquiring lock "refresh_cache-cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.011848] env[62923]: DEBUG oslo_concurrency.lockutils [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] Acquired lock "refresh_cache-cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.012032] env[62923]: DEBUG nova.network.neutron [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Refreshing network info cache for port 30f8b729-5714-40d3-8c21-f0662d7104c5 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 998.091640] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "refresh_cache-cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.097272] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.103337] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.348561] env[62923]: DEBUG nova.network.neutron [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance_info_cache with network_info: [{"id": "990e9014-0a5e-465f-8306-404937c589e0", "address": "fa:16:3e:1f:14:02", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap990e9014-0a", "ovs_interfaceid": "990e9014-0a5e-465f-8306-404937c589e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.396719] env[62923]: DEBUG nova.scheduler.client.report [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 125 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 998.397039] env[62923]: DEBUG nova.compute.provider_tree [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 125 to 126 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 998.397199] env[62923]: DEBUG nova.compute.provider_tree [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 998.541672] env[62923]: DEBUG nova.network.neutron [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 998.594917] env[62923]: DEBUG oslo_concurrency.lockutils [None req-fbb58116-ade1-4725-bed6-cbc86d9ae9ca tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.783s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.627925] env[62923]: DEBUG nova.network.neutron [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.851472] env[62923]: DEBUG oslo_concurrency.lockutils [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.901736] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.361s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.902290] env[62923]: DEBUG nova.compute.manager [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 998.904784] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 5.405s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.904941] env[62923]: DEBUG nova.objects.instance [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62923) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 999.129932] env[62923]: DEBUG oslo_concurrency.lockutils [req-5f53b28e-e60d-418e-aa9c-a1c086381a76 req-3520a259-0807-4eb6-88ad-ba3842b00f85 service nova] Releasing lock "refresh_cache-cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.130391] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "refresh_cache-cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.130579] env[62923]: DEBUG nova.network.neutron [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 999.374380] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fffdf317-4646-4a0f-8f68-9533c346c8e0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.392733] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9016642-d0c1-46d3-adc3-661489efa618 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.399586] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance '92a10f0a-4bfd-405a-956e-3ea29a740b28' progress to 83 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 999.411975] env[62923]: DEBUG nova.compute.utils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 999.413263] env[62923]: DEBUG nova.compute.manager [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Allocating IP information in 
the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 999.413461] env[62923]: DEBUG nova.network.neutron [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 999.445335] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.445335] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.445335] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.445335] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.445637] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.447850] env[62923]: INFO nova.compute.manager [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Terminating instance [ 999.449799] env[62923]: DEBUG nova.compute.manager [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 999.449992] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 999.450808] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16b487b-17cc-40db-aff9-bc8da3c51f49 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.454571] env[62923]: DEBUG nova.policy [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '120406e8c20c4bcc800edffadb3b9321', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7201d5b779d249aeacae9003db093552', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 999.460978] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.461231] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ea96a1f-7bca-44ed-adee-4797e9ac1813 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.467460] env[62923]: DEBUG oslo_vmware.api [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 999.467460] env[62923]: value = "task-1370353" [ 999.467460] env[62923]: _type = "Task" [ 999.467460] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.474801] env[62923]: DEBUG oslo_vmware.api [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370353, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.680699] env[62923]: DEBUG nova.network.neutron [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 999.707487] env[62923]: DEBUG nova.network.neutron [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Successfully created port: c844f692-0b8e-41c0-b17b-0259ef7ee633 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 999.898711] env[62923]: DEBUG nova.network.neutron [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Updating instance_info_cache with network_info: [{"id": "30f8b729-5714-40d3-8c21-f0662d7104c5", "address": "fa:16:3e:50:87:2d", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f8b729-57", "ovs_interfaceid": "30f8b729-5714-40d3-8c21-f0662d7104c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.905231] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 999.905546] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f114d9d0-fb82-4b3e-8662-a9191aa5061a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.914280] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5f42149c-3251-4568-9020-4b5e1e73dd46 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.915380] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 999.915380] env[62923]: value = "task-1370354" [ 999.915380] env[62923]: _type = "Task" [ 999.915380] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.915812] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.712s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.916054] env[62923]: DEBUG nova.objects.instance [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lazy-loading 'resources' on Instance uuid 75f9473f-ca67-4bb5-8663-0ce3709885e9 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.918012] env[62923]: DEBUG nova.compute.manager [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 999.935607] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370354, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.980988] env[62923]: DEBUG oslo_vmware.api [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370353, 'name': PowerOffVM_Task, 'duration_secs': 0.218155} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.983140] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 999.983140] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 999.983140] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3aca1ff4-a720-4be4-bed1-38054a6ec699 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.049067] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1000.049067] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1000.049067] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Deleting the datastore file [datastore2] 43065826-0f2b-48dc-bc42-8e0fd84fdcd3 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.049657] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcc092e1-8192-4a73-8a34-57f8816a18c0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.057272] env[62923]: DEBUG oslo_vmware.api [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1000.057272] env[62923]: value = "task-1370356" [ 1000.057272] env[62923]: _type = "Task" [ 1000.057272] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.065236] env[62923]: DEBUG oslo_vmware.api [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370356, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.401766] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "refresh_cache-cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.402126] env[62923]: DEBUG nova.compute.manager [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Instance network_info: |[{"id": "30f8b729-5714-40d3-8c21-f0662d7104c5", "address": "fa:16:3e:50:87:2d", "network": {"id": "fc5929c8-fd33-4434-b4f6-6f77c7ea46cc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1972639428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3b09245b63144e9bbcb2262aef33a21", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f8b729-57", "ovs_interfaceid": "30f8b729-5714-40d3-8c21-f0662d7104c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1000.402585] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:87:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91712705-510f-41a0-a803-2ecd92b676e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30f8b729-5714-40d3-8c21-f0662d7104c5', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1000.410664] env[62923]: DEBUG oslo.service.loopingcall [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1000.410929] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1000.411179] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d423ce4-4ac9-4e18-8387-762c510a96f7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.430502] env[62923]: DEBUG nova.objects.instance [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lazy-loading 'numa_topology' on Instance uuid 75f9473f-ca67-4bb5-8663-0ce3709885e9 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.446839] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370354, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.446839] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1000.446839] env[62923]: value = "task-1370357" [ 1000.446839] env[62923]: _type = "Task" [ 1000.446839] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.452954] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370357, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.476123] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.476457] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.566478] env[62923]: DEBUG oslo_vmware.api [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370356, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135718} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.566739] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1000.566912] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1000.567100] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1000.567274] env[62923]: INFO nova.compute.manager [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1000.567507] env[62923]: DEBUG oslo.service.loopingcall [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1000.567697] env[62923]: DEBUG nova.compute.manager [-] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1000.567785] env[62923]: DEBUG nova.network.neutron [-] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1000.934342] env[62923]: DEBUG nova.objects.base [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Object Instance<75f9473f-ca67-4bb5-8663-0ce3709885e9> lazy-loaded attributes: resources,numa_topology {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1000.938978] env[62923]: DEBUG nova.compute.manager [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1000.944069] env[62923]: DEBUG oslo_vmware.api [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370354, 'name': PowerOnVM_Task, 'duration_secs': 0.558998} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.944318] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1000.944449] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-82842ca6-1b55-4d0e-b960-eb4f91fadb2c tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance '92a10f0a-4bfd-405a-956e-3ea29a740b28' progress to 100 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1000.957040] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370357, 'name': CreateVM_Task, 'duration_secs': 0.299291} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.957313] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1000.957973] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.958157] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.958476] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1000.958720] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3fd347d-5cdf-4cd2-8b3f-827912f094f6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.965499] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 1000.965499] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52285781-0ed8-ba50-96e9-a8ac7d87d73e" [ 1000.965499] env[62923]: _type = "Task" [ 1000.965499] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.975956] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52285781-0ed8-ba50-96e9-a8ac7d87d73e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.978180] env[62923]: DEBUG nova.virt.hardware [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1000.978408] env[62923]: DEBUG nova.virt.hardware [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1000.978560] env[62923]: DEBUG nova.virt.hardware [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1000.978739] env[62923]: DEBUG nova.virt.hardware [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1000.978880] env[62923]: DEBUG nova.virt.hardware [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1000.979056] env[62923]: DEBUG nova.virt.hardware [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1000.979276] env[62923]: DEBUG nova.virt.hardware [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1000.979431] env[62923]: DEBUG nova.virt.hardware [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1000.979590] env[62923]: DEBUG nova.virt.hardware [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1000.979744] env[62923]: DEBUG nova.virt.hardware [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1000.979905] env[62923]: DEBUG nova.virt.hardware [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1000.980589] env[62923]: INFO nova.compute.manager [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Detaching volume f98451ba-8f2b-4010-bb20-e6959423a29c [ 1000.982696] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49d997a-1f40-4af2-88ce-9a934b396d43 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.994712] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1dac0ef-8998-416d-a0b4-e8c894d84ade {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.039922] env[62923]: INFO nova.virt.block_device [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Attempting to driver detach volume f98451ba-8f2b-4010-bb20-e6959423a29c from mountpoint /dev/sdb [ 1001.040175] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Volume detach. 
Driver type: vmdk {{(pid=62923) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1001.040364] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291505', 'volume_id': 'f98451ba-8f2b-4010-bb20-e6959423a29c', 'name': 'volume-f98451ba-8f2b-4010-bb20-e6959423a29c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7', 'attached_at': '', 'detached_at': '', 'volume_id': 'f98451ba-8f2b-4010-bb20-e6959423a29c', 'serial': 'f98451ba-8f2b-4010-bb20-e6959423a29c'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1001.041227] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d72418f-a446-4965-b7d8-62bac461bb1f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.070422] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb3fe8f-465b-4be1-9599-b417ab0dd508 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.078741] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79930a87-dcff-4a68-b475-0970e5bfbce6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.105032] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d461e210-899f-4f96-a7d2-af42d06deff9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.110297] env[62923]: DEBUG nova.compute.manager [req-9284d2fa-1e54-4abc-b393-a2eb03a83f11 req-594cb975-a03b-4065-af7b-6553e61dbfc0 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Received event network-vif-deleted-4ba4e87a-6f39-4b74-87b4-12b093d28f4a {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1001.110584] env[62923]: INFO nova.compute.manager [req-9284d2fa-1e54-4abc-b393-a2eb03a83f11 req-594cb975-a03b-4065-af7b-6553e61dbfc0 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Neutron deleted interface 4ba4e87a-6f39-4b74-87b4-12b093d28f4a; detaching it from the instance and deleting it from the info cache [ 1001.110679] env[62923]: DEBUG nova.network.neutron [req-9284d2fa-1e54-4abc-b393-a2eb03a83f11 req-594cb975-a03b-4065-af7b-6553e61dbfc0 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.128834] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] The volume has not been displaced from its original location: [datastore2] volume-f98451ba-8f2b-4010-bb20-e6959423a29c/volume-f98451ba-8f2b-4010-bb20-e6959423a29c.vmdk. No consolidation needed. 
{{(pid=62923) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1001.135068] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Reconfiguring VM instance instance-00000050 to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1001.138707] env[62923]: DEBUG nova.compute.manager [req-a662a3bd-a455-4989-a47f-b98b6a26c5a9 req-e635814c-0e7e-4f25-a320-d465799be392 service nova] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Received event network-vif-plugged-c844f692-0b8e-41c0-b17b-0259ef7ee633 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1001.138991] env[62923]: DEBUG oslo_concurrency.lockutils [req-a662a3bd-a455-4989-a47f-b98b6a26c5a9 req-e635814c-0e7e-4f25-a320-d465799be392 service nova] Acquiring lock "63b16034-87f0-433f-b48c-0e936642534c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.139339] env[62923]: DEBUG oslo_concurrency.lockutils [req-a662a3bd-a455-4989-a47f-b98b6a26c5a9 req-e635814c-0e7e-4f25-a320-d465799be392 service nova] Lock "63b16034-87f0-433f-b48c-0e936642534c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.139639] env[62923]: DEBUG oslo_concurrency.lockutils [req-a662a3bd-a455-4989-a47f-b98b6a26c5a9 req-e635814c-0e7e-4f25-a320-d465799be392 service nova] Lock "63b16034-87f0-433f-b48c-0e936642534c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.139932] env[62923]: DEBUG nova.compute.manager [req-a662a3bd-a455-4989-a47f-b98b6a26c5a9 req-e635814c-0e7e-4f25-a320-d465799be392 service nova] [instance: 63b16034-87f0-433f-b48c-0e936642534c] No waiting events found dispatching network-vif-plugged-c844f692-0b8e-41c0-b17b-0259ef7ee633 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1001.140203] env[62923]: WARNING nova.compute.manager [req-a662a3bd-a455-4989-a47f-b98b6a26c5a9 req-e635814c-0e7e-4f25-a320-d465799be392 service nova] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Received unexpected event network-vif-plugged-c844f692-0b8e-41c0-b17b-0259ef7ee633 for instance with vm_state building and task_state spawning. [ 1001.140901] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2fbd4a1-32bc-44b1-8e7b-1b171d79b46e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.160755] env[62923]: DEBUG oslo_vmware.api [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1001.160755] env[62923]: value = "task-1370358" [ 1001.160755] env[62923]: _type = "Task" [ 1001.160755] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.169554] env[62923]: DEBUG oslo_vmware.api [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370358, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.215617] env[62923]: DEBUG nova.network.neutron [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Successfully updated port: c844f692-0b8e-41c0-b17b-0259ef7ee633 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1001.230034] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a100a16c-fdc0-474d-b26b-7e16204ff715 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.238841] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64996bd-17b2-498c-b183-998f96d7790a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.269062] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da200dce-616f-4424-8ada-e840b4e1f064 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.278172] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc14bf19-e656-4c92-b7da-7bd918ff4fc1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.292628] env[62923]: DEBUG nova.compute.provider_tree [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.478877] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52285781-0ed8-ba50-96e9-a8ac7d87d73e, 'name': SearchDatastore_Task, 'duration_secs': 0.027351} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.478877] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.479156] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1001.479307] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.479469] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.479672] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1001.479951] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-022514d1-b549-4e79-ad5f-5d86d41b8f19 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.489318] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1001.489318] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1001.490477] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62df2683-ce80-4101-b17a-2e3d1974b66d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.495117] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 1001.495117] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52583d9d-a61b-3056-e517-c4b082366492" [ 1001.495117] env[62923]: _type = "Task" [ 1001.495117] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.506166] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52583d9d-a61b-3056-e517-c4b082366492, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.575978] env[62923]: DEBUG nova.network.neutron [-] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.613536] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-529929f2-b87e-437d-b30b-9ef3a0f2657e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.623306] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba24a8f0-ff2e-4cb6-ac67-20af9419a10b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.649492] env[62923]: DEBUG nova.compute.manager [req-9284d2fa-1e54-4abc-b393-a2eb03a83f11 req-594cb975-a03b-4065-af7b-6553e61dbfc0 service nova] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Detach interface failed, port_id=4ba4e87a-6f39-4b74-87b4-12b093d28f4a, reason: Instance 43065826-0f2b-48dc-bc42-8e0fd84fdcd3 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1001.669749] env[62923]: DEBUG oslo_vmware.api [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370358, 'name': ReconfigVM_Task, 'duration_secs': 0.377887} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.670035] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Reconfigured VM instance instance-00000050 to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1001.674561] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20680aef-ec5f-4120-91e2-409d6f6de973 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.690036] env[62923]: DEBUG oslo_vmware.api [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1001.690036] env[62923]: value = "task-1370359" [ 1001.690036] env[62923]: _type = "Task" [ 1001.690036] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.697253] env[62923]: DEBUG oslo_vmware.api [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370359, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.722355] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Acquiring lock "refresh_cache-63b16034-87f0-433f-b48c-0e936642534c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.722612] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Acquired lock "refresh_cache-63b16034-87f0-433f-b48c-0e936642534c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.722865] env[62923]: DEBUG nova.network.neutron [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1001.795209] env[62923]: DEBUG nova.scheduler.client.report [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1002.005139] env[62923]: DEBUG oslo_vmware.api [None 
req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52583d9d-a61b-3056-e517-c4b082366492, 'name': SearchDatastore_Task, 'duration_secs': 0.015977} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.006567] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99475a76-1be3-40c5-bb2d-444385beb98b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.011725] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 1002.011725] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522bdd64-a25a-3126-8929-2d1b0d598f29" [ 1002.011725] env[62923]: _type = "Task" [ 1002.011725] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.021258] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522bdd64-a25a-3126-8929-2d1b0d598f29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.078348] env[62923]: INFO nova.compute.manager [-] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Took 1.51 seconds to deallocate network for instance. [ 1002.200656] env[62923]: DEBUG oslo_vmware.api [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370359, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.257469] env[62923]: DEBUG nova.network.neutron [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1002.300399] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.384s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.302934] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.752s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.304699] env[62923]: INFO nova.compute.claims [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1002.393729] env[62923]: DEBUG nova.network.neutron [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Updating instance_info_cache with network_info: [{"id": "c844f692-0b8e-41c0-b17b-0259ef7ee633", "address": "fa:16:3e:24:08:2d", "network": {"id": "7c242ece-0cb0-431f-aa53-81ae03127129", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-618372671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7201d5b779d249aeacae9003db093552", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc844f692-0b", "ovs_interfaceid": "c844f692-0b8e-41c0-b17b-0259ef7ee633", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.522230] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522bdd64-a25a-3126-8929-2d1b0d598f29, 'name': SearchDatastore_Task, 'duration_secs': 0.008906} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.522452] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.522710] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0/cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1002.522961] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aed124aa-094b-435e-a473-1aba0668967e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.529973] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 1002.529973] env[62923]: value = "task-1370360" [ 1002.529973] env[62923]: _type = "Task" [ 1002.529973] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.537710] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370360, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.584646] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.701314] env[62923]: DEBUG oslo_vmware.api [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370359, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.814008] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bae929c1-7877-42ce-8bc0-f6f9a01ca197 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.912s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.814494] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 4.711s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.814494] env[62923]: INFO nova.compute.manager [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Unshelving [ 1002.896195] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Releasing lock "refresh_cache-63b16034-87f0-433f-b48c-0e936642534c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.896548] env[62923]: DEBUG nova.compute.manager [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Instance network_info: |[{"id": "c844f692-0b8e-41c0-b17b-0259ef7ee633", "address": "fa:16:3e:24:08:2d", "network": {"id": "7c242ece-0cb0-431f-aa53-81ae03127129", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-618372671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7201d5b779d249aeacae9003db093552", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc844f692-0b", "ovs_interfaceid": "c844f692-0b8e-41c0-b17b-0259ef7ee633", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1002.897150] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:24:08:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91c1da19-ab68-4127-bacd-accbaff19651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c844f692-0b8e-41c0-b17b-0259ef7ee633', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1002.904817] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Creating folder: Project (7201d5b779d249aeacae9003db093552). Parent ref: group-v291405. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1002.905205] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfeaac81-67db-4ba9-819f-5469631d4528 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.917359] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Created folder: Project (7201d5b779d249aeacae9003db093552) in parent group-v291405. [ 1002.917564] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Creating folder: Instances. Parent ref: group-v291519. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1002.917828] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf4e6acb-22fa-410e-8379-50674bf550b4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.927673] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Created folder: Instances in parent group-v291519. [ 1002.927933] env[62923]: DEBUG oslo.service.loopingcall [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.928158] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1002.928448] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-874baa0c-882a-4972-bebe-44c38dda4ed6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.947873] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1002.947873] env[62923]: value = "task-1370363" [ 1002.947873] env[62923]: _type = "Task" [ 1002.947873] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.956141] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370363, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.040942] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370360, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.156608] env[62923]: DEBUG nova.compute.manager [req-dfffccef-b902-49dc-8685-c8e6279a8985 req-c6f0fa12-349f-4799-8478-021db2dfadb0 service nova] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Received event network-changed-c844f692-0b8e-41c0-b17b-0259ef7ee633 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1003.156869] env[62923]: DEBUG nova.compute.manager [req-dfffccef-b902-49dc-8685-c8e6279a8985 req-c6f0fa12-349f-4799-8478-021db2dfadb0 service nova] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Refreshing instance network info cache due to event network-changed-c844f692-0b8e-41c0-b17b-0259ef7ee633. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1003.157185] env[62923]: DEBUG oslo_concurrency.lockutils [req-dfffccef-b902-49dc-8685-c8e6279a8985 req-c6f0fa12-349f-4799-8478-021db2dfadb0 service nova] Acquiring lock "refresh_cache-63b16034-87f0-433f-b48c-0e936642534c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.157378] env[62923]: DEBUG oslo_concurrency.lockutils [req-dfffccef-b902-49dc-8685-c8e6279a8985 req-c6f0fa12-349f-4799-8478-021db2dfadb0 service nova] Acquired lock "refresh_cache-63b16034-87f0-433f-b48c-0e936642534c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.157595] env[62923]: DEBUG nova.network.neutron [req-dfffccef-b902-49dc-8685-c8e6279a8985 req-c6f0fa12-349f-4799-8478-021db2dfadb0 service nova] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Refreshing network info cache for port c844f692-0b8e-41c0-b17b-0259ef7ee633 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1003.201046] env[62923]: DEBUG oslo_vmware.api [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370359, 'name': ReconfigVM_Task, 'duration_secs': 1.189838} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.201266] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291505', 'volume_id': 'f98451ba-8f2b-4010-bb20-e6959423a29c', 'name': 'volume-f98451ba-8f2b-4010-bb20-e6959423a29c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7', 'attached_at': '', 'detached_at': '', 'volume_id': 'f98451ba-8f2b-4010-bb20-e6959423a29c', 'serial': 'f98451ba-8f2b-4010-bb20-e6959423a29c'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1003.235558] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "92a10f0a-4bfd-405a-956e-3ea29a740b28" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.235667] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "92a10f0a-4bfd-405a-956e-3ea29a740b28" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.235787] env[62923]: DEBUG nova.compute.manager [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Going to confirm migration 3 {{(pid=62923) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1003.458634] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370363, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.470662] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e20fe3-4e4c-404d-bb45-096d6acd4a07 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.477262] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef4dfed-7f39-4088-b184-5acd40d51180 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.506637] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b62314e-8718-41f8-860b-342c1cf18d6f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.513327] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde0ac68-09d9-45f1-9c5f-a079ce301f5c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.527373] env[62923]: DEBUG nova.compute.provider_tree [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.538519] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370360, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528168} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.538753] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0/cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1003.538960] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1003.539207] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ff2b7b9-e970-451c-b1a7-238389b28b54 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.545819] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){ [ 1003.545819] env[62923]: value = "task-1370364" [ 1003.545819] env[62923]: _type = "Task" [ 1003.545819] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.553734] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370364, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.742210] env[62923]: DEBUG nova.objects.instance [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'flavor' on Instance uuid 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.782458] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.782648] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquired lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.782839] env[62923]: DEBUG nova.network.neutron [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.783050] env[62923]: DEBUG nova.objects.instance [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lazy-loading 'info_cache' on Instance uuid 92a10f0a-4bfd-405a-956e-3ea29a740b28 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.823236] env[62923]: DEBUG nova.compute.utils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1003.880994] env[62923]: DEBUG nova.network.neutron [req-dfffccef-b902-49dc-8685-c8e6279a8985 req-c6f0fa12-349f-4799-8478-021db2dfadb0 service nova] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Updated VIF entry in instance network info cache for port c844f692-0b8e-41c0-b17b-0259ef7ee633. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1003.881402] env[62923]: DEBUG nova.network.neutron [req-dfffccef-b902-49dc-8685-c8e6279a8985 req-c6f0fa12-349f-4799-8478-021db2dfadb0 service nova] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Updating instance_info_cache with network_info: [{"id": "c844f692-0b8e-41c0-b17b-0259ef7ee633", "address": "fa:16:3e:24:08:2d", "network": {"id": "7c242ece-0cb0-431f-aa53-81ae03127129", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-618372671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7201d5b779d249aeacae9003db093552", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91c1da19-ab68-4127-bacd-accbaff19651", "external-id": "nsx-vlan-transportzone-319", "segmentation_id": 319, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc844f692-0b", "ovs_interfaceid": "c844f692-0b8e-41c0-b17b-0259ef7ee633", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.960148] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370363, 'name': CreateVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.030714] env[62923]: DEBUG nova.scheduler.client.report [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1004.055582] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370364, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.24104} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1004.055926] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 1004.056601] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a766a4b9-a399-4188-a936-a2e6deaa5e9e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.077418] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0/cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1004.077644] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b44ff0ba-6caa-46af-aa60-ba408681af47 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.095735] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){
[ 1004.095735] env[62923]: value = "task-1370365"
[ 1004.095735] env[62923]: _type = "Task"
[ 1004.095735] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1004.106029] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370365, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1004.326631] env[62923]: INFO nova.virt.block_device [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Booting with volume 7c87212b-d640-4716-ace2-030c4b7ed621 at /dev/sdb
[ 1004.360023] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e05c20ef-44e1-44d2-b307-235815741c20 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.368070] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615feb44-cd8b-4265-928c-15e9141812c0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.383509] env[62923]: DEBUG oslo_concurrency.lockutils [req-dfffccef-b902-49dc-8685-c8e6279a8985 req-c6f0fa12-349f-4799-8478-021db2dfadb0 service nova] Releasing lock "refresh_cache-63b16034-87f0-433f-b48c-0e936642534c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1004.393954] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4c554fd-3185-43cb-9f7d-16ded4137c0b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.401691] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3748bdd1-3e64-4fb8-9025-db1018614c35 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.427352] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f97a91-41b9-4bf5-86ff-a4c45b279aa6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.433691] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d80ee69-c173-4763-9afb-284b6128097e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.447351] env[62923]: DEBUG nova.virt.block_device [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updating existing volume attachment record: 6c5a7691-dca0-42c6-ab9f-9ffae3a77180 {{(pid=62923) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}}
[ 1004.458914] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370363, 'name': CreateVM_Task, 'duration_secs': 1.056743} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1004.459093] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 1004.459801] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1004.459970] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1004.460331] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1004.460587] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5114889-43e3-4cdc-97e0-bad392db188f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.465116] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Waiting for the task: (returnval){
[ 1004.465116] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521c3a09-0691-f94b-8f88-484f05f4ab7c"
[ 1004.465116] env[62923]: _type = "Task"
[ 1004.465116] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1004.472209] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521c3a09-0691-f94b-8f88-484f05f4ab7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1004.536520] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1004.537105] env[62923]: DEBUG nova.compute.manager [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 1004.539769] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.961s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1004.539996] env[62923]: DEBUG nova.objects.instance [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lazy-loading 'resources' on Instance uuid 1b155391-37d9-4186-b70d-84f2dec5af82 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1004.605445] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370365, 'name': ReconfigVM_Task, 'duration_secs': 0.275104} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1004.605742] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Reconfigured VM instance instance-00000062 to attach disk [datastore2] cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0/cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1004.606567] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3447e3dd-bc94-4f3a-89a0-563e00be304b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.612362] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){
[ 1004.612362] env[62923]: value = "task-1370367"
[ 1004.612362] env[62923]: _type = "Task"
[ 1004.612362] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1004.620722] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370367, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1004.703224] env[62923]: DEBUG oslo_concurrency.lockutils [None req-60c48fd0-d08d-4e4e-8ae9-e66369a016f0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1004.752092] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bb343c8a-ef05-4837-9cf9-6857d433c57c tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 4.275s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1004.753255] env[62923]: DEBUG oslo_concurrency.lockutils [None req-60c48fd0-d08d-4e4e-8ae9-e66369a016f0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.050s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1004.753448] env[62923]: DEBUG nova.compute.manager [None req-60c48fd0-d08d-4e4e-8ae9-e66369a016f0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1004.754667] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b088118f-7c2e-46ab-b1af-3f152e043cb8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.760928] env[62923]: DEBUG nova.compute.manager [None req-60c48fd0-d08d-4e4e-8ae9-e66369a016f0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62923) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}}
[ 1004.761528] env[62923]: DEBUG nova.objects.instance [None req-60c48fd0-d08d-4e4e-8ae9-e66369a016f0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'flavor' on Instance uuid 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1004.978465] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521c3a09-0691-f94b-8f88-484f05f4ab7c, 'name': SearchDatastore_Task, 'duration_secs': 0.044666} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1004.978804] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1004.979061] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1004.979316] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1004.979472] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1004.979647] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1004.979938] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc94173f-5f85-4725-98c4-20a52a0ae887 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.989128] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1004.989332] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 1004.993026] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-678e18b6-e865-4eb1-ad56-91aee009707a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1004.999093] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Waiting for the task: (returnval){
[ 1004.999093] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527e792a-4767-153c-ee3d-75d663fd6008"
[ 1004.999093] env[62923]: _type = "Task"
[ 1004.999093] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1005.007541] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527e792a-4767-153c-ee3d-75d663fd6008, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1005.033361] env[62923]: DEBUG nova.network.neutron [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance_info_cache with network_info: [{"id": "990e9014-0a5e-465f-8306-404937c589e0", "address": "fa:16:3e:1f:14:02", "network": {"id": "921d004f-a6f8-43e2-a257-f928334bc752", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1144477314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a654d46357ed49cd95460a56926f102a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap990e9014-0a", "ovs_interfaceid": "990e9014-0a5e-465f-8306-404937c589e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1005.043209] env[62923]: DEBUG nova.compute.utils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1005.047991] env[62923]: DEBUG nova.compute.manager [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Not allocating networking since 'none' was specified. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}}
[ 1005.125388] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370367, 'name': Rename_Task, 'duration_secs': 0.299403} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1005.125720] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1005.125919] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41a0e8e1-1122-4708-ab7d-cdcd60637340 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1005.133716] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){
[ 1005.133716] env[62923]: value = "task-1370370"
[ 1005.133716] env[62923]: _type = "Task"
[ 1005.133716] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1005.141925] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370370, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1005.238867] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e934cc-b15d-4eb1-9d9a-3ed48f1abe4c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1005.246654] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd1c8e9-60a3-4b65-9b7d-315a0692c1d5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1005.279105] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67604cd2-ea78-4ee0-a8cd-1d058628c4c7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1005.282108] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-60c48fd0-d08d-4e4e-8ae9-e66369a016f0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1005.282386] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7680511-ff8a-4bf9-8a71-89a9751095c9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1005.289614] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a95542-439d-451e-a550-9d1db6745433 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1005.296382] env[62923]: DEBUG oslo_vmware.api [None req-60c48fd0-d08d-4e4e-8ae9-e66369a016f0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){
[ 1005.296382] env[62923]: value = "task-1370371"
[ 1005.296382] env[62923]: _type = "Task"
[ 1005.296382] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1005.304622] env[62923]: DEBUG oslo_vmware.api [None req-60c48fd0-d08d-4e4e-8ae9-e66369a016f0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370371, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1005.312751] env[62923]: DEBUG nova.compute.provider_tree [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1005.509633] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527e792a-4767-153c-ee3d-75d663fd6008, 'name': SearchDatastore_Task, 'duration_secs': 0.008674} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1005.510440] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc994922-a6ff-4d2c-9c40-b3e7e825bb86 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1005.515666] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Waiting for the task: (returnval){
[ 1005.515666] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5217440b-f6c8-bea3-d046-454803893c11"
[ 1005.515666] env[62923]: _type = "Task"
[ 1005.515666] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1005.524597] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5217440b-f6c8-bea3-d046-454803893c11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1005.536418] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Releasing lock "refresh_cache-92a10f0a-4bfd-405a-956e-3ea29a740b28" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1005.536689] env[62923]: DEBUG nova.objects.instance [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lazy-loading 'migration_context' on Instance uuid 92a10f0a-4bfd-405a-956e-3ea29a740b28 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1005.548869] env[62923]: DEBUG nova.compute.manager [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 1005.644923] env[62923]: DEBUG oslo_vmware.api [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370370, 'name': PowerOnVM_Task, 'duration_secs': 0.473005} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1005.644923] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1005.645181] env[62923]: INFO nova.compute.manager [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Took 9.07 seconds to spawn the instance on the hypervisor.
[ 1005.645181] env[62923]: DEBUG nova.compute.manager [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1005.645945] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2379eaf-b8fe-45ab-bcd6-74d9d4266fad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.806288] env[62923]: DEBUG oslo_vmware.api [None req-60c48fd0-d08d-4e4e-8ae9-e66369a016f0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370371, 'name': PowerOffVM_Task, 'duration_secs': 0.221164} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.806564] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-60c48fd0-d08d-4e4e-8ae9-e66369a016f0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1005.806739] env[62923]: DEBUG nova.compute.manager [None req-60c48fd0-d08d-4e4e-8ae9-e66369a016f0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1005.807624] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6ba163-3fd4-481e-be1c-96fd94991d66 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.815890] env[62923]: DEBUG nova.scheduler.client.report [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1006.027017] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5217440b-f6c8-bea3-d046-454803893c11, 'name': SearchDatastore_Task, 'duration_secs': 0.009008} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.027540] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.027829] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 63b16034-87f0-433f-b48c-0e936642534c/63b16034-87f0-433f-b48c-0e936642534c.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1006.028243] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-731cb4df-08a5-40a2-8772-4294a3a7a24b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.035303] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Waiting for the task: (returnval){ [ 1006.035303] env[62923]: value = "task-1370372" [ 1006.035303] env[62923]: _type = "Task" [ 1006.035303] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.039126] env[62923]: DEBUG nova.objects.base [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Object Instance<92a10f0a-4bfd-405a-956e-3ea29a740b28> lazy-loaded attributes: info_cache,migration_context {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1006.040175] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8878c3f-e9f4-46cb-859e-5da67eb562ea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.047374] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370372, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.065974] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-970cd16b-6191-40a6-8fac-e1adc87942e4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.071480] env[62923]: DEBUG oslo_vmware.api [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 1006.071480] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5246556c-9d7d-2c93-4274-9e0912500d64" [ 1006.071480] env[62923]: _type = "Task" [ 1006.071480] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.079877] env[62923]: DEBUG oslo_vmware.api [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5246556c-9d7d-2c93-4274-9e0912500d64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.164224] env[62923]: INFO nova.compute.manager [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Took 22.65 seconds to build instance. [ 1006.322440] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.783s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.324876] env[62923]: DEBUG oslo_concurrency.lockutils [None req-60c48fd0-d08d-4e4e-8ae9-e66369a016f0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.572s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.325854] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.229s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.326120] env[62923]: DEBUG nova.objects.instance [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lazy-loading 'resources' on Instance uuid 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1006.341922] env[62923]: INFO nova.scheduler.client.report [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleted allocations for instance 1b155391-37d9-4186-b70d-84f2dec5af82 [ 1006.545170] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370372, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.567543] env[62923]: DEBUG nova.compute.manager [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1006.581777] env[62923]: DEBUG oslo_vmware.api [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5246556c-9d7d-2c93-4274-9e0912500d64, 'name': SearchDatastore_Task, 'duration_secs': 0.017249} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.582106] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.596474] env[62923]: DEBUG nova.virt.hardware [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1006.596722] env[62923]: DEBUG nova.virt.hardware [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1006.596909] env[62923]: DEBUG nova.virt.hardware [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1006.597127] env[62923]: DEBUG nova.virt.hardware [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1006.597278] env[62923]: DEBUG nova.virt.hardware [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1006.597428] env[62923]: DEBUG nova.virt.hardware [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1006.597656] env[62923]: DEBUG nova.virt.hardware [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1006.597849] env[62923]: DEBUG nova.virt.hardware [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1006.598042] env[62923]: DEBUG nova.virt.hardware [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1006.598214] env[62923]: DEBUG nova.virt.hardware [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1006.598444] env[62923]: DEBUG nova.virt.hardware [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1006.599374] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94db8ced-9f2c-4432-931f-a9f9bc39287b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.610358] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a5f1d0-c153-47c1-ae50-ed2b18f96da4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.625657] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Instance VIF info [] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1006.631405] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Creating folder: Project (c20a31c6d585475daffe9deb44035068). Parent ref: group-v291405. 
{{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1006.632063] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89eb915b-b50d-4044-b596-3d25c5206884 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.642745] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Created folder: Project (c20a31c6d585475daffe9deb44035068) in parent group-v291405. [ 1006.642957] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Creating folder: Instances. Parent ref: group-v291524. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1006.643232] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf24b83a-fe40-4306-8428-38e3b89c88a3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.652904] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Created folder: Instances in parent group-v291524. [ 1006.653207] env[62923]: DEBUG oslo.service.loopingcall [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1006.653419] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1006.653833] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00280be1-3596-4072-82a4-e5652aecdb1c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.666965] env[62923]: DEBUG oslo_concurrency.lockutils [None req-069eeb8a-5042-4a85-9701-ec41a6dd0745 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.156s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.671792] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1006.671792] env[62923]: value = "task-1370375" [ 1006.671792] env[62923]: _type = "Task" [ 1006.671792] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.681540] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370375, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.853197] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c36268a9-95b3-43a7-9fed-0b11cc8afde7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1b155391-37d9-4186-b70d-84f2dec5af82" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.410s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.005305] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e065842-5fe9-44fe-8968-e4ee66a1d624 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.014434] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff47610-9c95-4612-9c81-1d23a948dc73 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.046906] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdeb4c5-ac16-406e-90f3-9d25aa2d6389 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.054373] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.911633} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.056514] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 63b16034-87f0-433f-b48c-0e936642534c/63b16034-87f0-433f-b48c-0e936642534c.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1007.056997] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1007.057081] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73de50ca-7754-4ddd-b49f-f50532224298 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.059697] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d4c97e-d5d6-49f9-b1eb-41abe9c95039 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.073461] env[62923]: DEBUG nova.compute.provider_tree [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1007.076164] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Waiting for the task: (returnval){ [ 1007.076164] env[62923]: value = "task-1370377" [ 1007.076164] env[62923]: _type = "Task" [ 1007.076164] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.084999] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370377, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.107018] env[62923]: DEBUG nova.objects.instance [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'flavor' on Instance uuid 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.182010] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370375, 'name': CreateVM_Task, 'duration_secs': 0.499608} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.182378] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1007.182623] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.182813] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.183160] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1007.183423] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-086f1586-3a97-4898-982f-2348b2116955 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.188230] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Waiting for the task: (returnval){ [ 1007.188230] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525fc0bd-4c89-59f6-ca42-8faf2c758927" [ 1007.188230] env[62923]: _type = "Task" [ 1007.188230] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.195602] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525fc0bd-4c89-59f6-ca42-8faf2c758927, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.590756] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370377, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067567} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.591027] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1007.591795] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c623725-599f-43e3-8dc5-aa262affe4fb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.616298] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] 63b16034-87f0-433f-b48c-0e936642534c/63b16034-87f0-433f-b48c-0e936642534c.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1007.617447] env[62923]: DEBUG nova.scheduler.client.report [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 126 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1007.617683] env[62923]: DEBUG nova.compute.provider_tree [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 
tempest-ServersTestJSON-56933000-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 126 to 127 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1007.617862] env[62923]: DEBUG nova.compute.provider_tree [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1007.622433] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d17659c2-f2b8-46bd-991f-162fed1411a7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.637707] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.637889] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquired lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.638106] env[62923]: DEBUG nova.network.neutron [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1007.638300] env[62923]: DEBUG nova.objects.instance [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'info_cache' on Instance uuid 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.646653] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Waiting for the task: (returnval){ [ 1007.646653] env[62923]: value = "task-1370378" [ 1007.646653] env[62923]: _type = "Task" [ 1007.646653] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.656745] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370378, 'name': ReconfigVM_Task} progress is 6%. 
[ 1007.656745] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370378, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1007.698103] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525fc0bd-4c89-59f6-ca42-8faf2c758927, 'name': SearchDatastore_Task, 'duration_secs': 0.009326} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1007.698445] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1007.698684] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1007.698914] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1007.699075] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1007.699261] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1007.699521] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dfc91b32-5d44-4eb8-b3c2-a28a918cb515 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1007.706780] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
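The Acquiring/Acquired/Releasing triplets above are oslo.concurrency named locks; the "waited N.NNNs" and "held N.NNNs" figures elsewhere in the log are emitted by the same wrapper. A minimal sketch of both forms with the real lockutils API (the image-cache lock name is illustrative, not a Nova constant):

    # Named-lock pattern behind the "Acquiring lock / Acquired lock /
    # Releasing lock" lines above.
    from oslo_concurrency import lockutils

    def refresh_image_cache(image_id):
        # Callers using the same name serialize here; this wait is what the
        # "acquired ... :: waited N.NNNs" lines measure.
        with lockutils.lock(f'image-cache-{image_id}'):
            pass  # fetch/copy the image exactly once per cache entry

    # Decorator form, for lock names like "compute_resources" above:
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass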
[ 1007.706954] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 1007.707655] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed3921e8-d4b9-43b9-adc9-3d671d2c60e0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1007.712758] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Waiting for the task: (returnval){
[ 1007.712758] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c2836b-d06a-6621-4647-4f0ee59c84d9"
[ 1007.712758] env[62923]: _type = "Task"
[ 1007.712758] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1007.721963] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c2836b-d06a-6621-4647-4f0ee59c84d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1008.142730] env[62923]: DEBUG nova.objects.base [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Object Instance<3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7> lazy-loaded attributes: flavor,info_cache {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}}
[ 1008.144563] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.819s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1008.147095] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.563s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1008.147336] env[62923]: DEBUG nova.objects.instance [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lazy-loading 'resources' on Instance uuid 43065826-0f2b-48dc-bc42-8e0fd84fdcd3 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1008.158472] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370378, 'name': ReconfigVM_Task, 'duration_secs': 0.267145} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1008.158765] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Reconfigured VM instance instance-00000063 to attach disk [datastore2] 63b16034-87f0-433f-b48c-0e936642534c/63b16034-87f0-433f-b48c-0e936642534c.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1008.159410] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79e9b80c-f306-4cfb-ac41-66954d79a9c4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1008.166384] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Waiting for the task: (returnval){
[ 1008.166384] env[62923]: value = "task-1370379"
[ 1008.166384] env[62923]: _type = "Task"
[ 1008.166384] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1008.170186] env[62923]: INFO nova.scheduler.client.report [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Deleted allocations for instance 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f
[ 1008.176662] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370379, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1008.224258] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c2836b-d06a-6621-4647-4f0ee59c84d9, 'name': SearchDatastore_Task, 'duration_secs': 0.008561} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1008.225322] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1ed585e-1457-4781-952c-0b079fa2ea44 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1008.231166] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Waiting for the task: (returnval){
[ 1008.231166] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52bacb8c-a2b0-737e-5c41-34b3965ea371"
[ 1008.231166] env[62923]: _type = "Task"
[ 1008.231166] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1008.239611] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52bacb8c-a2b0-737e-5c41-34b3965ea371, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1008.679531] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370379, 'name': Rename_Task, 'duration_secs': 0.149577} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1008.684138] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1008.686869] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf6e9426-c018-4a3c-862d-62172a1170a8 tempest-ServersTestJSON-56933000 tempest-ServersTestJSON-56933000-project-member] Lock "9f0b13d8-eb25-474c-b9bb-80ee9dd4955f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.276s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1008.687773] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4479b6cc-4586-4086-8390-08c9f74eeb45 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1008.694861] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Waiting for the task: (returnval){
[ 1008.694861] env[62923]: value = "task-1370380"
[ 1008.694861] env[62923]: _type = "Task"
[ 1008.694861] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1008.705418] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370380, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1008.742535] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52bacb8c-a2b0-737e-5c41-34b3965ea371, 'name': SearchDatastore_Task, 'duration_secs': 0.00881} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1008.744663] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1008.744941] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] d1026124-821b-44c1-b1f6-257597ce1195/d1026124-821b-44c1-b1f6-257597ce1195.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 1008.745407] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f19b7bd4-e21d-4448-a9ca-73e016ceb1ed {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1008.751759] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Waiting for the task: (returnval){
[ 1008.751759] env[62923]: value = "task-1370381"
[ 1008.751759] env[62923]: _type = "Task"
[ 1008.751759] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
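The sequence above (_fetch_image_if_missing, a lock on the cached vmdk, SearchDatastore_Task, then CopyVirtualDisk_Task into the instance folder) is a copy-on-first-use image cache: the Glance image is materialized once under devstack-image-cache_base and later spawns only copy it datastore-side. A schematic local-filesystem re-implementation of the same pattern; the paths and the fetch callback are hypothetical stand-ins for the datastore calls, not Nova code:

    # Schematic copy-on-first-use cache mirroring the flow logged above;
    # plain files stand in for datastore vmdk objects.
    import shutil
    from pathlib import Path
    from oslo_concurrency import lockutils

    CACHE = Path('devstack-image-cache_base')   # hypothetical local stand-in

    def spawn_root_disk(image_id: str, fetch, instance_dir: Path) -> Path:
        cached = CACHE / image_id / f'{image_id}.vmdk'
        with lockutils.lock(f'{CACHE}/{image_id}'):          # cf. the vmdk lock above
            if not cached.exists():                          # cf. SearchDatastore_Task
                cached.parent.mkdir(parents=True, exist_ok=True)  # cf. MakeDirectory
                fetch(image_id, cached)                      # download from Glance once
        instance_dir.mkdir(parents=True, exist_ok=True)
        dest = instance_dir / f'{instance_dir.name}.vmdk'
        shutil.copyfile(cached, dest)                        # cf. CopyVirtualDisk_Task
        return dest                                          # then extended and attached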
[ 1008.762135] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370381, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1008.817344] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91da062a-ab8f-4e33-925f-05634e5e3fad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1008.827042] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842725f9-82d8-46c7-8a99-bbf879265952 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1008.860666] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089c8613-2af9-48f8-9338-fd8a6e254992 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1008.869549] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749d806c-7af1-4c70-a988-36c5e64c4da9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1008.883957] env[62923]: DEBUG nova.compute.provider_tree [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1008.902920] env[62923]: DEBUG nova.network.neutron [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Updating instance_info_cache with network_info: [{"id": "70dafc2e-d2a9-49fa-ac00-d46b002927bf", "address": "fa:16:3e:79:27:67", "network": {"id": "f9845a8d-f3e3-4080-8b11-bca02678b9c5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1137339094-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a855374ba4624ee78230d07b85b2ab8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70dafc2e-d2", "ovs_interfaceid": "70dafc2e-d2a9-49fa-ac00-d46b002927bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1009.205995] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370380, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1009.263139] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370381, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1009.388057] env[62923]: DEBUG nova.scheduler.client.report [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1009.405068] env[62923]: DEBUG oslo_concurrency.lockutils [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Releasing lock "refresh_cache-3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1009.436623] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1009.436816] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1009.437473] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1009.437473] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
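The network_info blob dumped above is Nova's serialized per-instance NIC model: a list of VIFs, each carrying a MAC address, a network with subnets, and fixed IPs with optional floating IPs attached. Extracting the addresses from a structure shaped like the logged one (trimmed to just the fields this snippet touches):

    # Walking a network_info structure shaped like the one logged above.
    network_info = [{
        "id": "70dafc2e-d2a9-49fa-ac00-d46b002927bf",
        "address": "fa:16:3e:79:27:67",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.5", "type": "fixed",
                     "floating_ips": [{"address": "10.180.180.207",
                                       "type": "floating"}]}],
        }]},
    }]

    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floats = [f["address"] for f in ip.get("floating_ips", [])]
                print(vif["address"], ip["address"], floats)
    # fa:16:3e:79:27:67 192.168.128.5 ['10.180.180.207']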
[ 1009.437473] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1009.439811] env[62923]: INFO nova.compute.manager [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Terminating instance
[ 1009.441628] env[62923]: DEBUG nova.compute.manager [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 1009.441892] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1009.442670] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8054f2f0-814b-4c67-842f-8241b15727d7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1009.450394] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1009.450642] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95e7399d-e793-452b-ae6b-f33e1869664c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1009.457291] env[62923]: DEBUG oslo_vmware.api [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){
[ 1009.457291] env[62923]: value = "task-1370382"
[ 1009.457291] env[62923]: _type = "Task"
[ 1009.457291] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1009.465013] env[62923]: DEBUG oslo_vmware.api [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370382, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1009.543837] env[62923]: DEBUG nova.compute.manager [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Stashing vm_state: active {{(pid=62923) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}}
[ 1009.706521] env[62923]: DEBUG oslo_vmware.api [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370380, 'name': PowerOnVM_Task, 'duration_secs': 0.833087} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1009.706783] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1009.707086] env[62923]: INFO nova.compute.manager [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Took 8.77 seconds to spawn the instance on the hypervisor.
[ 1009.707306] env[62923]: DEBUG nova.compute.manager [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1009.708130] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2794fa0-044a-4161-adcf-aab185182539 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1009.764057] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370381, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.603836} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1009.764377] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] d1026124-821b-44c1-b1f6-257597ce1195/d1026124-821b-44c1-b1f6-257597ce1195.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 1009.764611] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 1009.764870] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fd897327-f16b-45c8-806c-43cc5b7739b7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1009.771995] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Waiting for the task: (returnval){
[ 1009.771995] env[62923]: value = "task-1370383"
[ 1009.771995] env[62923]: _type = "Task"
[ 1009.771995] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1009.780081] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370383, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1009.892422] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.745s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1009.895271] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.313s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1009.909182] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1009.909182] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34303041-7f21-41ee-aef2-73014fa8de61 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1009.915615] env[62923]: DEBUG oslo_vmware.api [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){
[ 1009.915615] env[62923]: value = "task-1370384"
[ 1009.915615] env[62923]: _type = "Task"
[ 1009.915615] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1009.925113] env[62923]: INFO nova.scheduler.client.report [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Deleted allocations for instance 43065826-0f2b-48dc-bc42-8e0fd84fdcd3
[ 1009.932965] env[62923]: DEBUG oslo_vmware.api [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370384, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1009.969384] env[62923]: DEBUG oslo_vmware.api [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370382, 'name': PowerOffVM_Task, 'duration_secs': 0.332447} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1009.969722] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1009.969869] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1009.970074] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08bdd1b5-2438-4f0e-b0a5-d95b2b0d6a6d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.048920] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1010.056918] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1010.057184] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1010.057391] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Deleting the datastore file [datastore2] cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1010.057679] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6907e9b6-2dba-42af-928b-07d07ed8078e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.064452] env[62923]: DEBUG oslo_vmware.api [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for the task: (returnval){
[ 1010.064452] env[62923]: value = "task-1370386"
[ 1010.064452] env[62923]: _type = "Task"
[ 1010.064452] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1010.071696] env[62923]: DEBUG oslo_concurrency.lockutils [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1010.074096] env[62923]: DEBUG oslo_vmware.api [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1010.225064] env[62923]: INFO nova.compute.manager [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Took 16.87 seconds to build instance.
[ 1010.282347] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370383, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113921} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1010.282660] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 1010.283520] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3939630c-5132-4bf0-a5cb-6d03f5d2a492 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.304063] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] d1026124-821b-44c1-b1f6-257597ce1195/d1026124-821b-44c1-b1f6-257597ce1195.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1010.304307] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4800a054-13f9-40b1-a7d5-5c7a4d83a6f9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.328016] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Waiting for the task: (returnval){
[ 1010.328016] env[62923]: value = "task-1370387"
[ 1010.328016] env[62923]: _type = "Task"
[ 1010.328016] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1010.336414] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370387, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1010.429138] env[62923]: DEBUG oslo_vmware.api [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370384, 'name': PowerOnVM_Task, 'duration_secs': 0.449107} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1010.429138] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1010.429304] env[62923]: DEBUG nova.compute.manager [None req-f2f1e46f-2108-450b-b60a-2a24698ebb76 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1010.430057] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e1bd84-b2a9-4fe2-a700-a9d6817bf3ed {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.438493] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2212a37e-a5a2-41de-ab60-325632684262 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "43065826-0f2b-48dc-bc42-8e0fd84fdcd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.993s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1010.574333] env[62923]: DEBUG oslo_vmware.api [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1010.578142] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633b980e-4425-46f5-b7ad-9f48fad9edb3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.586254] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a565e2aa-43e5-40b5-9f6c-8dd80a42b0e5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.623272] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ced3dff-1198-4f84-9b92-605a666c935a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.630072] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219524b5-12a0-42aa-b32c-d22d5b0b6c26 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1010.643941] env[62923]: DEBUG nova.compute.provider_tree [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1010.727305] env[62923]: DEBUG oslo_concurrency.lockutils [None req-78fdbc87-7896-4b9c-9614-142b8999368d tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lock "63b16034-87f0-433f-b48c-0e936642534c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.384s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1010.838620] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1011.079018] env[62923]: DEBUG oslo_vmware.api [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Task: {'id': task-1370386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.606089} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1011.079018] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1011.079230] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1011.079298] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1011.079465] env[62923]: INFO nova.compute.manager [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Took 1.64 seconds to destroy the instance on the hypervisor.
[ 1011.079707] env[62923]: DEBUG oslo.service.loopingcall [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1011.079902] env[62923]: DEBUG nova.compute.manager [-] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1011.079997] env[62923]: DEBUG nova.network.neutron [-] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1011.149780] env[62923]: DEBUG nova.scheduler.client.report [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
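The oslo.service.loopingcall entry above ("Waiting for function ..._deallocate_network_with_retries to return") is a retry-until-done wrapper: the deallocation function is re-invoked on a timer until it signals completion. Nova uses the back-off variant here; a minimal fixed-interval sketch with the real oslo_service API, where deallocate() is a stand-in for the wrapped function:

    # Retry-until-done pattern behind the oslo.service.loopingcall line above.
    # deallocate() is a stand-in; Nova wraps _deallocate_network_with_retries.
    from oslo_service import loopingcall

    attempts = {'n': 0}

    def deallocate():
        attempts['n'] += 1
        if attempts['n'] < 3:       # simulate two transient failures
            return                   # not done yet; called again next interval
        raise loopingcall.LoopingCallDone(retvalue='deallocated')

    timer = loopingcall.FixedIntervalLoopingCall(deallocate)
    result = timer.start(interval=0.1).wait()   # the "Waiting for function" step
    print(result)                               # deallocated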
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.779331] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d565a5b6-fcc0-4b9e-8643-911f7777b9c7 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Acquiring lock "interface-63b16034-87f0-433f-b48c-0e936642534c-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.779641] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d565a5b6-fcc0-4b9e-8643-911f7777b9c7 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lock "interface-63b16034-87f0-433f-b48c-0e936642534c-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.780063] env[62923]: DEBUG nova.objects.instance [None req-d565a5b6-fcc0-4b9e-8643-911f7777b9c7 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lazy-loading 'flavor' on Instance uuid 63b16034-87f0-433f-b48c-0e936642534c {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.839366] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370387, 'name': ReconfigVM_Task, 'duration_secs': 1.036174} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.839652] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Reconfigured VM instance instance-00000064 to attach disk [datastore2] d1026124-821b-44c1-b1f6-257597ce1195/d1026124-821b-44c1-b1f6-257597ce1195.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1011.840281] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a58cf41d-c7cc-45bb-8e16-71ca3345ff85 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.846513] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Waiting for the task: (returnval){ [ 1011.846513] env[62923]: value = "task-1370388" [ 1011.846513] env[62923]: _type = "Task" [ 1011.846513] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.855744] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370388, 'name': Rename_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.901660] env[62923]: DEBUG nova.network.neutron [-] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.160995] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.266s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.164060] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.115s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.164344] env[62923]: DEBUG nova.objects.instance [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lazy-loading 'pci_requests' on Instance uuid 75f9473f-ca67-4bb5-8663-0ce3709885e9 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.284895] env[62923]: DEBUG nova.objects.instance [None req-d565a5b6-fcc0-4b9e-8643-911f7777b9c7 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lazy-loading 'pci_requests' on Instance uuid 63b16034-87f0-433f-b48c-0e936642534c {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.357198] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370388, 'name': Rename_Task, 'duration_secs': 0.136975} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.357622] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1012.357834] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d0ec9da-2dc6-4857-8268-731b181d134e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.365323] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Waiting for the task: (returnval){ [ 1012.365323] env[62923]: value = "task-1370389" [ 1012.365323] env[62923]: _type = "Task" [ 1012.365323] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.372748] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370389, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.404521] env[62923]: INFO nova.compute.manager [-] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Took 1.32 seconds to deallocate network for instance. [ 1012.671070] env[62923]: DEBUG nova.objects.instance [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lazy-loading 'numa_topology' on Instance uuid 75f9473f-ca67-4bb5-8663-0ce3709885e9 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.731279] env[62923]: INFO nova.scheduler.client.report [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleted allocation for migration 73716037-3a40-4904-be0e-5a06f0bc0a28 [ 1012.787616] env[62923]: DEBUG nova.objects.base [None req-d565a5b6-fcc0-4b9e-8643-911f7777b9c7 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Object Instance<63b16034-87f0-433f-b48c-0e936642534c> lazy-loaded attributes: flavor,pci_requests {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1012.787861] env[62923]: DEBUG nova.network.neutron [None req-d565a5b6-fcc0-4b9e-8643-911f7777b9c7 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1012.873120] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d565a5b6-fcc0-4b9e-8643-911f7777b9c7 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lock "interface-63b16034-87f0-433f-b48c-0e936642534c-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.093s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.877958] env[62923]: DEBUG oslo_vmware.api [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370389, 'name': PowerOnVM_Task, 'duration_secs': 0.46435} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.877958] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1012.878202] env[62923]: INFO nova.compute.manager [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Took 6.31 seconds to spawn the instance on the hypervisor. 
[ 1012.878345] env[62923]: DEBUG nova.compute.manager [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1012.879165] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282a05c1-ae75-4934-8ead-1a131581bec2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.911433] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.176797] env[62923]: INFO nova.compute.claims [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1013.237066] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "92a10f0a-4bfd-405a-956e-3ea29a740b28" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 10.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.395844] env[62923]: INFO nova.compute.manager [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Took 16.86 seconds to build instance.
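The recurring "Acquiring lock / acquired ... waited / released ... held" triplets throughout this log are emitted by oslo.concurrency whenever code runs under a named lock; the qualified name in quotes is whatever function the lock wrapper guards. A minimal sketch of the pattern (the lock name and function here are illustrative, not Nova's actual code):

```python
# Sketch: with debug logging enabled, this produces the same
# acquire/release DEBUG lines seen above. Names are illustrative.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim(instance_uuid):
    # Runs with the named lock held: "waited" in the log is the time
    # spent blocking before this body starts, "held" is its duration.
    return instance_uuid

claim('75f9473f-ca67-4bb5-8663-0ce3709885e9')
```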
[ 1013.897562] env[62923]: DEBUG oslo_concurrency.lockutils [None req-8a56470f-876c-46e0-b889-acfcf41cbbc5 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Lock "d1026124-821b-44c1-b1f6-257597ce1195" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 18.372s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.002296] env[62923]: DEBUG nova.compute.manager [req-c8182b3e-d0c1-497f-a5cd-542a89e79c67 req-93bda6f5-8e0d-4cf3-bc36-b65c9d04d93b service nova] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Received event network-vif-deleted-30f8b729-5714-40d3-8c21-f0662d7104c5 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1014.266422] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "92a10f0a-4bfd-405a-956e-3ea29a740b28" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.266782] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "92a10f0a-4bfd-405a-956e-3ea29a740b28" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.267009] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "92a10f0a-4bfd-405a-956e-3ea29a740b28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.267294] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "92a10f0a-4bfd-405a-956e-3ea29a740b28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.267408] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "92a10f0a-4bfd-405a-956e-3ea29a740b28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.269563] env[62923]: INFO nova.compute.manager [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Terminating instance [ 1014.271426] env[62923]: DEBUG nova.compute.manager [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917
tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1014.271621] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1014.272548] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc79d78-2d58-4bb9-960a-21f2ab0d68bf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.284132] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1014.284132] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-844377b3-4ed5-42f1-89f1-d44c1383db1e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.290181] env[62923]: DEBUG oslo_vmware.api [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 1014.290181] env[62923]: value = "task-1370390" [ 1014.290181] env[62923]: _type = "Task" [ 1014.290181] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.299509] env[62923]: DEBUG oslo_vmware.api [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370390, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.328909] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886bae42-d61a-475f-b17d-e95ba20e5bc8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.336318] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788fab0a-d04e-4b1a-b36a-f235d9f8df46 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.366031] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2a870a-2d97-468f-b341-a957a0da7af1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.374056] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7699772-2dad-4162-9b04-19ad359aee15 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.387598] env[62923]: DEBUG nova.compute.provider_tree [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.800248] env[62923]: DEBUG oslo_vmware.api [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370390, 'name': PowerOffVM_Task, 'duration_secs': 0.253564} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.800533] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1014.800712] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1014.800962] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f15ace9-d208-4bd2-b189-f5c53380290f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.867789] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1014.867983] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1014.868176] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleting the datastore file [datastore1] 92a10f0a-4bfd-405a-956e-3ea29a740b28 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1014.868450] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6590fb2-8a9c-470d-a084-0614097e56ec {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.875451] env[62923]: DEBUG oslo_vmware.api [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for the task: (returnval){ [ 1014.875451] env[62923]: value = "task-1370392" [ 1014.875451] env[62923]: _type = "Task" [ 1014.875451] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.885736] env[62923]: DEBUG oslo_vmware.api [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370392, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.890770] env[62923]: DEBUG nova.scheduler.client.report [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1015.051520] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Acquiring lock "63b16034-87f0-433f-b48c-0e936642534c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.051808] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lock "63b16034-87f0-433f-b48c-0e936642534c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.052236] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Acquiring lock "63b16034-87f0-433f-b48c-0e936642534c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.052549] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lock "63b16034-87f0-433f-b48c-0e936642534c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.052777] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lock "63b16034-87f0-433f-b48c-0e936642534c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.054947] env[62923]: INFO nova.compute.manager [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Terminating instance [ 1015.056827] env[62923]: DEBUG nova.compute.manager [None
req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1015.057147] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1015.058015] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa22769-ab6e-4c62-8323-136d5f85a17c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.066369] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1015.066689] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00165ef3-8ce2-4fc9-a21a-6df727410a50 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.073658] env[62923]: DEBUG oslo_vmware.api [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Waiting for the task: (returnval){ [ 1015.073658] env[62923]: value = "task-1370393" [ 1015.073658] env[62923]: _type = "Task" [ 1015.073658] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.082345] env[62923]: DEBUG oslo_vmware.api [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370393, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.265285] env[62923]: DEBUG nova.compute.manager [None req-85574c8f-e364-46bf-94a0-ebfca203b8a2 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1015.266731] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6daac704-2bda-4319-8c07-f430cad3fd8f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.335827] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Acquiring lock "d1026124-821b-44c1-b1f6-257597ce1195" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.337075] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Lock "d1026124-821b-44c1-b1f6-257597ce1195" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.337075] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Acquiring lock "d1026124-821b-44c1-b1f6-257597ce1195-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.337075] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Lock "d1026124-821b-44c1-b1f6-257597ce1195-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.337273] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Lock "d1026124-821b-44c1-b1f6-257597ce1195-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.339713] env[62923]: INFO nova.compute.manager [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Terminating instance [ 1015.341501] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Acquiring lock "refresh_cache-d1026124-821b-44c1-b1f6-257597ce1195"
{{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.341762] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Acquired lock "refresh_cache-d1026124-821b-44c1-b1f6-257597ce1195" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.342201] env[62923]: DEBUG nova.network.neutron [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1015.386367] env[62923]: DEBUG oslo_vmware.api [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Task: {'id': task-1370392, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.417409} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.386706] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1015.386929] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1015.387166] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1015.387408] env[62923]: INFO nova.compute.manager [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1015.387640] env[62923]: DEBUG oslo.service.loopingcall [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1015.387881] env[62923]: DEBUG nova.compute.manager [-] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1015.388062] env[62923]: DEBUG nova.network.neutron [-] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1015.395797] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.232s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.397834] env[62923]: DEBUG oslo_concurrency.lockutils [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.327s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.438696] env[62923]: INFO nova.network.neutron [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updating port 545dfb40-7ae4-4d69-86f8-0d334ced67ff with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1015.584447] env[62923]: DEBUG oslo_vmware.api [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370393, 'name': PowerOffVM_Task, 'duration_secs': 0.242973} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.585297] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1015.585297] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1015.585297] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ca47b3d-cb22-4d21-8223-4d85231be662 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.652515] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1015.652515] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1015.652515] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Deleting the datastore file [datastore2] 63b16034-87f0-433f-b48c-0e936642534c {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1015.652735] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-640790a6-7b32-4728-8029-d18511b2bfd7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.659076] env[62923]: DEBUG oslo_vmware.api [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Waiting for the task: (returnval){ [ 1015.659076] env[62923]: value = "task-1370395" [ 1015.659076] env[62923]: _type = "Task" [ 1015.659076] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.667796] env[62923]: DEBUG oslo_vmware.api [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370395, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.784307] env[62923]: INFO nova.compute.manager [None req-85574c8f-e364-46bf-94a0-ebfca203b8a2 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] instance snapshotting [ 1015.785079] env[62923]: DEBUG nova.objects.instance [None req-85574c8f-e364-46bf-94a0-ebfca203b8a2 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Lazy-loading 'flavor' on Instance uuid d1026124-821b-44c1-b1f6-257597ce1195 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.860959] env[62923]: DEBUG nova.network.neutron [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1015.902691] env[62923]: INFO nova.compute.claims [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1015.913168] env[62923]: DEBUG nova.network.neutron [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.952627] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.952905] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.026349] env[62923]: DEBUG nova.compute.manager [req-41918504-2722-41cd-b5fe-d76c2af41e28 req-bb6ebfbc-5fc5-420d-84af-c8ff38acaa9e service nova] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Received event network-vif-deleted-990e9014-0a5e-465f-8306-404937c589e0 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1016.026552] env[62923]: INFO nova.compute.manager [req-41918504-2722-41cd-b5fe-d76c2af41e28 req-bb6ebfbc-5fc5-420d-84af-c8ff38acaa9e service nova] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Neutron deleted interface 990e9014-0a5e-465f-8306-404937c589e0; detaching it from the instance and deleting it from the info cache [ 1016.026725] env[62923]: DEBUG nova.network.neutron [req-41918504-2722-41cd-b5fe-d76c2af41e28
req-bb6ebfbc-5fc5-420d-84af-c8ff38acaa9e service nova] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.168444] env[62923]: DEBUG oslo_vmware.api [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Task: {'id': task-1370395, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.327263} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.168806] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1016.169030] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1016.169380] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1016.169380] env[62923]: INFO nova.compute.manager [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1016.169624] env[62923]: DEBUG oslo.service.loopingcall [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1016.170163] env[62923]: DEBUG nova.compute.manager [-] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1016.170284] env[62923]: DEBUG nova.network.neutron [-] [instance: 63b16034-87f0-433f-b48c-0e936642534c] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1016.176258] env[62923]: DEBUG nova.network.neutron [-] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.291539] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2772e040-7dc6-4bd8-9bf2-22ac6c8c97a3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.307625] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9af1078-2e07-4223-8f08-dd856ce0da96 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.411866] env[62923]: INFO nova.compute.resource_tracker [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating resource usage from migration ae5ecb83-a61f-457c-ae39-66bc710f74df [ 1016.416270] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Releasing lock "refresh_cache-d1026124-821b-44c1-b1f6-257597ce1195" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.416652] env[62923]: DEBUG nova.compute.manager [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1016.416839] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1016.417843] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb730968-07d7-4824-aca5-b1509f0e047f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.425294] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1016.426012] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b889b1e7-59e0-4b53-9448-49fa5d0230a6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.431713] env[62923]: DEBUG oslo_vmware.api [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Waiting for the task: (returnval){ [ 1016.431713] env[62923]: value = "task-1370396" [ 1016.431713] env[62923]: _type = "Task" [ 1016.431713] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.442766] env[62923]: DEBUG oslo_vmware.api [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370396, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.455346] env[62923]: DEBUG nova.compute.manager [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1016.530201] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be827b6b-7ef1-42a6-b617-baae644227e2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.538936] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2f8ce3-6da8-4dda-b064-1ade19430dce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.567300] env[62923]: DEBUG nova.compute.manager [req-41918504-2722-41cd-b5fe-d76c2af41e28 req-bb6ebfbc-5fc5-420d-84af-c8ff38acaa9e service nova] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Detach interface failed, port_id=990e9014-0a5e-465f-8306-404937c589e0, reason: Instance 92a10f0a-4bfd-405a-956e-3ea29a740b28 could not be found. 
{{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1016.582061] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a6d116-008e-46a0-b938-11c659c47fc2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.589508] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87336cf-9b95-41b0-8344-b627e3365ab8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.619293] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370e5c4f-31f2-4bd7-9df9-6470c561bb73 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.626965] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c5ca64-ce9a-489b-b308-4b374dae3200 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.642091] env[62923]: DEBUG nova.compute.provider_tree [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1016.679057] env[62923]: INFO nova.compute.manager [-] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Took 1.29 seconds to deallocate network for instance. 
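The "Waiting for function ..._deallocate_network_with_retries to return" entries (loopingcall.py:435) show oslo.service driving network deallocation as a looping call: a wrapped function is invoked repeatedly until it signals completion, and the caller blocks on the result. A rough sketch of that mechanism, with a stand-in for the real Neutron cleanup (the stub and its failure count are illustrative):

```python
# Sketch: retry a cleanup step on a fixed interval until it succeeds,
# the mechanism behind the loopingcall "Waiting for function" lines.
from oslo_service import loopingcall

attempts = {'count': 0}

def _cleanup_stub():
    """Stand-in for the real port cleanup; succeeds on the 3rd try."""
    attempts['count'] += 1
    if attempts['count'] < 3:
        raise RuntimeError('transient failure')

def _deallocate_with_retries():
    try:
        _cleanup_stub()
    except RuntimeError:
        return  # swallow the error and run again on the next interval
    raise loopingcall.LoopingCallDone()  # success: stop the loop

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
timer.start(interval=1.0).wait()  # this wait is what the log reports
```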
[ 1016.816557] env[62923]: DEBUG nova.compute.manager [None req-85574c8f-e364-46bf-94a0-ebfca203b8a2 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Instance disappeared during snapshot {{(pid=62923) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 1016.918965] env[62923]: DEBUG nova.network.neutron [-] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.924305] env[62923]: DEBUG nova.compute.manager [req-f9ccecd6-7c8d-428c-85a2-1d0f6b7a3ff0 req-3fd00feb-7bd0-4c04-ab2a-ad77b342d941 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Received event network-vif-plugged-545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1016.924609] env[62923]: DEBUG oslo_concurrency.lockutils [req-f9ccecd6-7c8d-428c-85a2-1d0f6b7a3ff0 req-3fd00feb-7bd0-4c04-ab2a-ad77b342d941 service nova] Acquiring lock "75f9473f-ca67-4bb5-8663-0ce3709885e9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.924727] env[62923]: DEBUG oslo_concurrency.lockutils [req-f9ccecd6-7c8d-428c-85a2-1d0f6b7a3ff0 req-3fd00feb-7bd0-4c04-ab2a-ad77b342d941 service nova] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.924890] env[62923]: DEBUG oslo_concurrency.lockutils [req-f9ccecd6-7c8d-428c-85a2-1d0f6b7a3ff0 req-3fd00feb-7bd0-4c04-ab2a-ad77b342d941 service nova] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.925096] env[62923]: DEBUG nova.compute.manager [req-f9ccecd6-7c8d-428c-85a2-1d0f6b7a3ff0 req-3fd00feb-7bd0-4c04-ab2a-ad77b342d941 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] No waiting events found dispatching network-vif-plugged-545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1016.925287] env[62923]: WARNING nova.compute.manager [req-f9ccecd6-7c8d-428c-85a2-1d0f6b7a3ff0 req-3fd00feb-7bd0-4c04-ab2a-ad77b342d941 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Received unexpected event network-vif-plugged-545dfb40-7ae4-4d69-86f8-0d334ced67ff for instance with vm_state shelved_offloaded and task_state spawning. [ 1016.944663] env[62923]: DEBUG oslo_vmware.api [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370396, 'name': PowerOffVM_Task, 'duration_secs': 0.218594} completed successfully.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.944925] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1016.945132] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1016.945398] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfc5453c-832c-4a3e-a84a-be8e85dd2df3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.956457] env[62923]: DEBUG nova.compute.manager [None req-85574c8f-e364-46bf-94a0-ebfca203b8a2 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Found 0 images (rotation: 2) {{(pid=62923) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 1016.969151] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1016.969388] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1016.969573] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Deleting the datastore file [datastore2] d1026124-821b-44c1-b1f6-257597ce1195 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1016.969964] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-418dc36e-a393-48af-8ca7-ef13c431b1c2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.975794] env[62923]: DEBUG oslo_vmware.api [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Waiting for the task: (returnval){ [ 1016.975794] env[62923]: value = "task-1370398" [ 1016.975794] env[62923]: _type = "Task" [ 1016.975794] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.980607] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.987106] env[62923]: DEBUG oslo_vmware.api [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370398, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.001118] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.001369] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.001516] env[62923]: DEBUG nova.network.neutron [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1017.166129] env[62923]: ERROR nova.scheduler.client.report [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [req-cafba696-a8ec-4a17-8416-769ca5b40272] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cafba696-a8ec-4a17-8416-769ca5b40272"}]} [ 1017.184631] env[62923]: DEBUG nova.scheduler.client.report [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1017.187755] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.204952] env[62923]: DEBUG nova.scheduler.client.report [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1017.205384] env[62923]: DEBUG nova.compute.provider_tree [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1017.219206] env[62923]: DEBUG nova.scheduler.client.report [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1017.240404] env[62923]: DEBUG nova.scheduler.client.report [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1017.371666] env[62923]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3fa5bf-98e2-4b11-a1e8-8e215043fb59 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.379738] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6c7fe8-45ab-4db2-8759-9d1336634d67 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.411375] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc16095f-538e-437c-bbca-6cee2acf2924 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.420330] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b329dc5a-2b00-49dc-bcad-f9d7a05a08f2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.422737] env[62923]: INFO nova.compute.manager [-] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Took 1.25 seconds to deallocate network for instance. [ 1017.434631] env[62923]: DEBUG nova.compute.provider_tree [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1017.484930] env[62923]: DEBUG oslo_vmware.api [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Task: {'id': task-1370398, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140583} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.485200] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1017.485389] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1017.485566] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1017.485737] env[62923]: INFO nova.compute.manager [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1017.485977] env[62923]: DEBUG oslo.service.loopingcall [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1017.486254] env[62923]: DEBUG nova.compute.manager [-] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1017.486343] env[62923]: DEBUG nova.network.neutron [-] [instance: d1026124-821b-44c1-b1f6-257597ce1195] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1017.500658] env[62923]: DEBUG nova.network.neutron [-] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Instance cache missing network info.
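Editor's note: the loopingcall entry above shows nova driving _deallocate_network_with_retries through an oslo.service looping call so a transient neutron failure does not leak ports. A minimal stdlib sketch of that retry pattern, assuming nothing about nova's actual attempt count or delays (the function name and numbers here are illustrative):

    import time

    def call_with_retries(func, attempts=3, initial_delay=1.0, backoff=2.0):
        # Re-invoke func() on failure, sleeping between attempts with
        # exponential backoff; re-raise once the attempts are exhausted.
        delay = initial_delay
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except Exception:
                if attempt == attempts:
                    raise
                time.sleep(delay)
                delay *= backoff

    # Usage (deallocate_for_instance here is a hypothetical flaky call):
    # call_with_retries(lambda: deallocate_for_instance(ctx, instance))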
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1017.776408] env[62923]: DEBUG nova.network.neutron [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updating instance_info_cache with network_info: [{"id": "545dfb40-7ae4-4d69-86f8-0d334ced67ff", "address": "fa:16:3e:18:f5:c7", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap545dfb40-7a", "ovs_interfaceid": "545dfb40-7ae4-4d69-86f8-0d334ced67ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.937199] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.963415] env[62923]: DEBUG nova.scheduler.client.report [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 131 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1017.963694] env[62923]: DEBUG nova.compute.provider_tree [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 131 to 132 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1017.963884] env[62923]: DEBUG nova.compute.provider_tree [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 
tempest-ServerActionsTestJSON-1144686189-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1018.003313] env[62923]: DEBUG nova.network.neutron [-] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.051178] env[62923]: DEBUG nova.compute.manager [req-b5bb8fe2-d663-4f5b-b29c-0558839c3a05 req-7df935f8-3f30-4f9c-8185-20eb79d9d617 service nova] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Received event network-vif-deleted-c844f692-0b8e-41c0-b17b-0259ef7ee633 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1018.278714] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.303743] env[62923]: DEBUG nova.virt.hardware [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='072d262d3008eea307ac8c27d79ac212',container_format='bare',created_at=2024-10-29T12:09:27Z,direct_url=,disk_format='vmdk',id=51961a0b-7b31-4a1e-b2f1-1914040134b5,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-453037088-shelved',owner='418b805157a74173b5cfe13ea5b61c13',properties=ImageMetaProps,protected=,size=31666176,status='active',tags=,updated_at=2024-10-29T12:09:41Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1018.304012] env[62923]: DEBUG nova.virt.hardware [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1018.304199] env[62923]: DEBUG nova.virt.hardware [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1018.304390] env[62923]: DEBUG nova.virt.hardware [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Flavor pref 0:0:0 
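Editor's note: the 409 from placement earlier in this sequence (code placement.concurrent_update, followed by a refresh and a generation bump from 131 to 132) is placement's optimistic concurrency control: every inventory PUT carries the provider generation the writer last saw, and a stale generation is rejected so the writer must re-read and retry. A sketch of that client loop against the placement REST API, assuming requests is available and leaving keystone auth headers out; the endpoint value is illustrative:

    import requests

    PLACEMENT = "http://placement.example/placement"   # illustrative endpoint
    API = {"OpenStack-API-Version": "placement 1.26"}  # any generation-aware version

    def set_inventory(rp_uuid, inventories, retries=3):
        url = "%s/resource_providers/%s/inventories" % (PLACEMENT, rp_uuid)
        for _ in range(retries):
            # Re-read the provider to pick up the current generation.
            current = requests.get(url, headers=API).json()
            payload = {
                "resource_provider_generation":
                    current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=payload, headers=API)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: someone else bumped the
            # generation between our GET and PUT; loop and try again.
        raise RuntimeError("inventory update still conflicting after retries")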
{{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1018.304540] env[62923]: DEBUG nova.virt.hardware [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1018.304688] env[62923]: DEBUG nova.virt.hardware [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1018.304947] env[62923]: DEBUG nova.virt.hardware [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1018.305135] env[62923]: DEBUG nova.virt.hardware [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1018.305309] env[62923]: DEBUG nova.virt.hardware [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1018.305474] env[62923]: DEBUG nova.virt.hardware [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1018.305645] env[62923]: DEBUG nova.virt.hardware [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1018.306560] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49937638-a796-40d1-8598-a357444c9cd5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.315031] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b68624-a948-48b2-8c71-e88b7b47bcfe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.328119] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:f5:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '545dfb40-7ae4-4d69-86f8-0d334ced67ff', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1018.335675] env[62923]: DEBUG oslo.service.loopingcall [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1018.335967] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1018.336200] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b669b258-ccb7-4200-a01c-12da3b51a51f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.354866] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1018.354866] env[62923]: value = "task-1370399" [ 1018.354866] env[62923]: _type = "Task" [ 1018.354866] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.362051] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370399, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.469471] env[62923]: DEBUG oslo_concurrency.lockutils [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.071s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.469642] env[62923]: INFO nova.compute.manager [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Migrating [ 1018.476345] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.565s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.476626] env[62923]: DEBUG nova.objects.instance [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lazy-loading 'resources' on Instance uuid cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.506178] env[62923]: INFO nova.compute.manager [-] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Took 1.02 seconds to deallocate network for instance. [ 1018.865016] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370399, 'name': CreateVM_Task, 'duration_secs': 0.30201} completed successfully. 
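Editor's note: the Task: {'id': task-1370399, ...} progress lines above are the poll-until-done loop oslo.vmware runs for every vCenter task. A generic stand-in, where poll() is any callable returning a TaskInfo-like dict; the dict shape is an assumption of this sketch, not the real vSphere object:

    import time

    def wait_for_task(poll, interval=0.5, timeout=300.0):
        # poll() -> {'state': 'running'|'success'|'error', 'progress': int}
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError(info.get("message", "task failed"))
            time.sleep(interval)  # the "progress is 0%" lines are these polls
        raise TimeoutError("task did not complete within %.0fs" % timeout)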
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.865186] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1018.865864] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/51961a0b-7b31-4a1e-b2f1-1914040134b5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.866089] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "[datastore2] devstack-image-cache_base/51961a0b-7b31-4a1e-b2f1-1914040134b5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.866461] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/51961a0b-7b31-4a1e-b2f1-1914040134b5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1018.866721] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ab4c8c1-1764-48f5-816a-a4421bf0a50f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.870888] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1018.870888] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]526c9561-36a6-6f02-5d84-30c3c231ee21" [ 1018.870888] env[62923]: _type = "Task" [ 1018.870888] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.878042] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]526c9561-36a6-6f02-5d84-30c3c231ee21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.948617] env[62923]: DEBUG nova.compute.manager [req-1d26f6a6-3a3c-490f-9295-aefa205f468c req-18df0d1b-f973-49e7-9742-9dcc41ee3f7a service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Received event network-changed-545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1018.948861] env[62923]: DEBUG nova.compute.manager [req-1d26f6a6-3a3c-490f-9295-aefa205f468c req-18df0d1b-f973-49e7-9742-9dcc41ee3f7a service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Refreshing instance network info cache due to event network-changed-545dfb40-7ae4-4d69-86f8-0d334ced67ff. 
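Editor's note: acquiring the "[datastore2] devstack-image-cache_base/<image-id>" lock plus its external semaphore before touching the cache serializes concurrent fetches of the same image. A sketch using oslo.concurrency's file-backed locks; the lock name and lock_path here are illustrative (the real name in the log embeds the datastore path), and fetch is a hypothetical download callable:

    from oslo_concurrency import lockutils

    IMAGE_ID = "51961a0b-7b31-4a1e-b2f1-1914040134b5"  # image id from the log

    def fetch_image_once(fetch):
        # external=True adds a file lock, so the exclusion holds across
        # processes, not just across threads in this one.
        with lockutils.lock("devstack-image-cache_base-%s" % IMAGE_ID,
                            external=True, lock_path="/tmp"):
            return fetch()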
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1018.949042] env[62923]: DEBUG oslo_concurrency.lockutils [req-1d26f6a6-3a3c-490f-9295-aefa205f468c req-18df0d1b-f973-49e7-9742-9dcc41ee3f7a service nova] Acquiring lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.949138] env[62923]: DEBUG oslo_concurrency.lockutils [req-1d26f6a6-3a3c-490f-9295-aefa205f468c req-18df0d1b-f973-49e7-9742-9dcc41ee3f7a service nova] Acquired lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.949301] env[62923]: DEBUG nova.network.neutron [req-1d26f6a6-3a3c-490f-9295-aefa205f468c req-18df0d1b-f973-49e7-9742-9dcc41ee3f7a service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Refreshing network info cache for port 545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1018.989854] env[62923]: DEBUG oslo_concurrency.lockutils [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.990048] env[62923]: DEBUG oslo_concurrency.lockutils [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.990228] env[62923]: DEBUG nova.network.neutron [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.011633] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.136231] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f8901e-b493-495f-aab9-4336aa55c878 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.144814] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4088a9fb-6d8d-4ac5-b7e4-923e2bcf40e9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.174945] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65477c6d-b7eb-4b9a-a728-94e83136181d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.181719] env[62923]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2a3c62-af3f-452a-a9c9-4fd711a1bf9b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.194351] env[62923]: DEBUG nova.compute.provider_tree [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.381499] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "[datastore2] devstack-image-cache_base/51961a0b-7b31-4a1e-b2f1-1914040134b5" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.381769] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Processing image 51961a0b-7b31-4a1e-b2f1-1914040134b5 {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1019.382011] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/51961a0b-7b31-4a1e-b2f1-1914040134b5/51961a0b-7b31-4a1e-b2f1-1914040134b5.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.382173] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "[datastore2] devstack-image-cache_base/51961a0b-7b31-4a1e-b2f1-1914040134b5/51961a0b-7b31-4a1e-b2f1-1914040134b5.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.382356] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1019.382604] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6ef90c5-3c7e-4ece-ac92-969816924e88 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.390605] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1019.390780] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Folder [datastore2] devstack-image-cache_base created. 
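Editor's note: the "Creating directory" / "Created directory" / "Folder ... created" trio above reflects the create-if-missing idiom named in _create_folder_if_missing: directory creation must tolerate another request having created the cache folder first. In plain Python terms:

    import os

    def create_folder_if_missing(path):
        # exist_ok makes a lost race a no-op instead of an error.
        os.makedirs(path, exist_ok=True)
        return path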
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1019.391491] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e65cb4c-8ecf-40a4-b90a-884773969304 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.396474] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1019.396474] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521dbdf0-aa3c-73a2-cae8-7bdc9730bf6e" [ 1019.396474] env[62923]: _type = "Task" [ 1019.396474] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.404415] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521dbdf0-aa3c-73a2-cae8-7bdc9730bf6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.666989] env[62923]: DEBUG nova.network.neutron [req-1d26f6a6-3a3c-490f-9295-aefa205f468c req-18df0d1b-f973-49e7-9742-9dcc41ee3f7a service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updated VIF entry in instance network info cache for port 545dfb40-7ae4-4d69-86f8-0d334ced67ff. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1019.667414] env[62923]: DEBUG nova.network.neutron [req-1d26f6a6-3a3c-490f-9295-aefa205f468c req-18df0d1b-f973-49e7-9742-9dcc41ee3f7a service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updating instance_info_cache with network_info: [{"id": "545dfb40-7ae4-4d69-86f8-0d334ced67ff", "address": "fa:16:3e:18:f5:c7", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap545dfb40-7a", "ovs_interfaceid": "545dfb40-7ae4-4d69-86f8-0d334ced67ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.692399] env[62923]: DEBUG nova.network.neutron [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 
1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance_info_cache with network_info: [{"id": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "address": "fa:16:3e:38:95:72", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb7d101-34", "ovs_interfaceid": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.697433] env[62923]: DEBUG nova.scheduler.client.report [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1019.906854] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Preparing fetch location {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1019.906854] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Fetch image to [datastore2] OSTACK_IMG_7b02f8e9-e65e-4887-9d80-2d448f6b81a7/OSTACK_IMG_7b02f8e9-e65e-4887-9d80-2d448f6b81a7.vmdk {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1019.907122] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Downloading stream optimized image 51961a0b-7b31-4a1e-b2f1-1914040134b5 to [datastore2] OSTACK_IMG_7b02f8e9-e65e-4887-9d80-2d448f6b81a7/OSTACK_IMG_7b02f8e9-e65e-4887-9d80-2d448f6b81a7.vmdk on the data store datastore2 as vApp {{(pid=62923) 
_fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1019.907209] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Downloading image file data 51961a0b-7b31-4a1e-b2f1-1914040134b5 to the ESX as VM named 'OSTACK_IMG_7b02f8e9-e65e-4887-9d80-2d448f6b81a7' {{(pid=62923) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1019.992958] env[62923]: DEBUG oslo_vmware.rw_handles [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1019.992958] env[62923]: value = "resgroup-9" [ 1019.992958] env[62923]: _type = "ResourcePool" [ 1019.992958] env[62923]: }. {{(pid=62923) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1019.993360] env[62923]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f04e1f11-eefb-4678-b286-8b46328c2d58 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.013771] env[62923]: DEBUG oslo_vmware.rw_handles [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lease: (returnval){ [ 1020.013771] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527522e5-b74e-e044-bbdd-b3e47b7d9734" [ 1020.013771] env[62923]: _type = "HttpNfcLease" [ 1020.013771] env[62923]: } obtained for vApp import into resource pool (val){ [ 1020.013771] env[62923]: value = "resgroup-9" [ 1020.013771] env[62923]: _type = "ResourcePool" [ 1020.013771] env[62923]: }. {{(pid=62923) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1020.014102] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the lease: (returnval){ [ 1020.014102] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527522e5-b74e-e044-bbdd-b3e47b7d9734" [ 1020.014102] env[62923]: _type = "HttpNfcLease" [ 1020.014102] env[62923]: } to be ready. {{(pid=62923) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1020.019844] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1020.019844] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527522e5-b74e-e044-bbdd-b3e47b7d9734" [ 1020.019844] env[62923]: _type = "HttpNfcLease" [ 1020.019844] env[62923]: } is initializing. 
{{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1020.170167] env[62923]: DEBUG oslo_concurrency.lockutils [req-1d26f6a6-3a3c-490f-9295-aefa205f468c req-18df0d1b-f973-49e7-9742-9dcc41ee3f7a service nova] Releasing lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.194994] env[62923]: DEBUG oslo_concurrency.lockutils [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.201557] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.725s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.203813] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.223s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.205468] env[62923]: INFO nova.compute.claims [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1020.221576] env[62923]: INFO nova.scheduler.client.report [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Deleted allocations for instance cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0 [ 1020.522821] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1020.522821] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527522e5-b74e-e044-bbdd-b3e47b7d9734" [ 1020.522821] env[62923]: _type = "HttpNfcLease" [ 1020.522821] env[62923]: } is ready. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1020.523673] env[62923]: DEBUG oslo_vmware.rw_handles [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1020.523673] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527522e5-b74e-e044-bbdd-b3e47b7d9734" [ 1020.523673] env[62923]: _type = "HttpNfcLease" [ 1020.523673] env[62923]: }. 
{{(pid=62923) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1020.523872] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ddc8c68-2980-4438-b6a1-ecb45101e33a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.531082] env[62923]: DEBUG oslo_vmware.rw_handles [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52372066-4ad7-995e-4c7c-848810f8e31b/disk-0.vmdk from lease info. {{(pid=62923) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1020.531274] env[62923]: DEBUG oslo_vmware.rw_handles [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating HTTP connection to write to file with size = 31666176 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52372066-4ad7-995e-4c7c-848810f8e31b/disk-0.vmdk. {{(pid=62923) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1020.596059] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f1c90815-7b7e-456f-af4d-d00c2ae32308 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.733622] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5c1fe06a-7451-4648-9c08-527f17a6a1f2 tempest-ServerDiskConfigTestJSON-92227930 tempest-ServerDiskConfigTestJSON-92227930-project-member] Lock "cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 11.296s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.391200] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3565ab61-fe9d-45cf-b189-ff302678647e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.400896] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4d7d23-e0a4-4307-870f-62769365679b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.439499] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58e464f-6a37-4dd9-b5a4-87de61ff073f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.448184] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a839ea0-1d7c-43d5-b3cc-414b210accea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.462933] env[62923]: DEBUG nova.compute.provider_tree [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.705934] env[62923]: DEBUG oslo_vmware.rw_handles [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Completed reading data from the image iterator. {{(pid=62923) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1021.706257] env[62923]: DEBUG oslo_vmware.rw_handles [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52372066-4ad7-995e-4c7c-848810f8e31b/disk-0.vmdk. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1021.707247] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22455193-ee6b-456b-a61f-7a9bf6188603 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.713834] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bcd0480-c8e9-4e92-98f8-65b1a3ca1ba3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.719420] env[62923]: DEBUG oslo_vmware.rw_handles [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52372066-4ad7-995e-4c7c-848810f8e31b/disk-0.vmdk is in state: ready. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1021.719420] env[62923]: DEBUG oslo_vmware.rw_handles [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52372066-4ad7-995e-4c7c-848810f8e31b/disk-0.vmdk. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1021.719420] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-e27e5df0-a142-4ce4-a334-6774f901ca9a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.735524] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance '1fef5eb2-acb0-4d00-81a3-c270af7df0e8' progress to 0 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1021.909699] env[62923]: DEBUG oslo_vmware.rw_handles [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52372066-4ad7-995e-4c7c-848810f8e31b/disk-0.vmdk. 
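Editor's note: the entries from 1019.992 through 1021.909 trace a full HttpNfcLease import: create the lease against a resource pool, wait for it to leave "initializing", pull the VMDK upload URL from the lease info, stream the image bytes while ticking lease progress, then complete and release the lease. An outline of that lifecycle over abstract callables; the lease object's attributes here (.state, .write, .progress, .complete) are assumptions of this sketch, not oslo.vmware's API:

    import time

    def upload_via_import_lease(create_lease, chunks):
        lease = create_lease()                 # ResourcePool.ImportVApp
        while lease.state == "initializing":   # wait for lease ready
            time.sleep(0.5)
        if lease.state != "ready":
            raise RuntimeError("lease failed: %s" % lease.state)
        sent = 0
        for chunk in chunks:                   # the glance image iterator
            lease.write(chunk)                 # HTTP write to .../disk-0.vmdk
            sent += len(chunk)
            lease.progress(sent)               # HttpNfcLeaseProgress keep-alive
        lease.complete()                       # HttpNfcLeaseComplete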
{{(pid=62923) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1021.909928] env[62923]: INFO nova.virt.vmwareapi.images [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Downloaded image file data 51961a0b-7b31-4a1e-b2f1-1914040134b5 [ 1021.910747] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a122c469-c79c-4ac1-addd-728d99d3162c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.926743] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab8a683a-27b1-4a43-a224-bd1154d6717e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.968030] env[62923]: DEBUG nova.scheduler.client.report [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1021.978128] env[62923]: INFO nova.virt.vmwareapi.images [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] The imported VM was unregistered [ 1021.980627] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Caching image {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1021.980906] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating directory with path [datastore2] devstack-image-cache_base/51961a0b-7b31-4a1e-b2f1-1914040134b5 {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1021.981206] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e0af606-2b71-4a4b-9120-d6e339bb3f00 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.992122] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Created directory with path [datastore2] devstack-image-cache_base/51961a0b-7b31-4a1e-b2f1-1914040134b5 {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1021.992184] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-14418e80-e852-4dff-b51b-551e9ca2d91f 
tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_7b02f8e9-e65e-4887-9d80-2d448f6b81a7/OSTACK_IMG_7b02f8e9-e65e-4887-9d80-2d448f6b81a7.vmdk to [datastore2] devstack-image-cache_base/51961a0b-7b31-4a1e-b2f1-1914040134b5/51961a0b-7b31-4a1e-b2f1-1914040134b5.vmdk. {{(pid=62923) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1021.992749] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-bd363e8a-8a39-4c67-8318-cf2c62190fdf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.998905] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1021.998905] env[62923]: value = "task-1370402" [ 1021.998905] env[62923]: _type = "Task" [ 1021.998905] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.006891] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370402, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.243727] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.244169] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8cce2140-e097-423c-a67b-cf1fc0b15f12 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.253773] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1022.253773] env[62923]: value = "task-1370403" [ 1022.253773] env[62923]: _type = "Task" [ 1022.253773] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.271347] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370403, 'name': PowerOffVM_Task} progress is 0%. 
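Editor's note: fetching into a throwaway OSTACK_IMG_<uuid> name, unregistering the import VM, and only then moving the disk to devstack-image-cache_base/<image-id>/<image-id>.vmdk means readers of the cache never observe a half-written VMDK. A local-filesystem analogy of that staging-then-move dance; paths and helper names are illustrative, and download is a hypothetical slow fetch:

    import os
    import shutil
    import uuid

    def fill_image_cache(cache_dir, image_id, download):
        final = os.path.join(cache_dir, image_id, image_id + ".vmdk")
        if os.path.exists(final):
            return final                       # cache hit, nothing to do
        staging = os.path.join(cache_dir,
                               "OSTACK_IMG_%s.vmdk" % uuid.uuid4())
        download(staging)                      # slow fetch lands on a temp name
        os.makedirs(os.path.dirname(final), exist_ok=True)
        shutil.move(staging, final)            # publish only once complete
        return final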
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.473012] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.269s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.473616] env[62923]: DEBUG nova.compute.manager [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1022.476645] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.289s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.476850] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.479634] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.542s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.479946] env[62923]: DEBUG nova.objects.instance [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lazy-loading 'resources' on Instance uuid 63b16034-87f0-433f-b48c-0e936642534c {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.504882] env[62923]: INFO nova.scheduler.client.report [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Deleted allocations for instance 92a10f0a-4bfd-405a-956e-3ea29a740b28 [ 1022.512904] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370402, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.764107] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370403, 'name': PowerOffVM_Task} progress is 0%. 
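Editor's note: "Start building networks asynchronously" above means nova kicks off neutron port allocation in the background and keeps preparing the guest, joining the result only when it is actually needed. A compact sketch of that overlap with concurrent.futures; allocate_ports and build_disks are stand-ins for the real steps, not nova's API:

    from concurrent.futures import ThreadPoolExecutor

    def build_instance(allocate_ports, build_disks):
        with ThreadPoolExecutor(max_workers=1) as pool:
            nw_future = pool.submit(allocate_ports)  # runs in the background
            disks = build_disks()                    # overlaps with allocation
            network_info = nw_future.result()        # join before plugging VIFs
        return disks, network_info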
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.983480] env[62923]: DEBUG nova.compute.utils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1022.988589] env[62923]: DEBUG nova.compute.manager [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1022.988589] env[62923]: DEBUG nova.network.neutron [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1023.017805] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370402, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.018508] env[62923]: DEBUG oslo_concurrency.lockutils [None req-defae447-33bd-4a6f-8353-d6cb502805c5 tempest-DeleteServersTestJSON-1508743917 tempest-DeleteServersTestJSON-1508743917-project-member] Lock "92a10f0a-4bfd-405a-956e-3ea29a740b28" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.752s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.041521] env[62923]: DEBUG nova.policy [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d2829faa8f74da8a1432abd0c2434f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76d290a91b3b4d9491f755fd3d7e7894', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 1023.135280] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd60182-0cb4-49fe-a561-56bdcc05daf1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.145085] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23638998-34cf-4e93-b2d8-5e46335ea499 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.182175] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05400146-0bd9-428e-83a2-ac07857bd520 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
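The records above show oslo.vmware's task-polling loop end to end: the driver invokes a vSphere *_Task method through the API session, immediately gets back a Task managed-object reference (the multi-line "Waiting for the task: (returnval){ ... }" block), and _poll_task then re-reads the task state until it reports success or error, logging "progress is N%" on each pass. A minimal sketch of that calling pattern using the public oslo.vmware API; the vCenter host, credentials, datacenter moref, and datastore paths are placeholders, not values from this log.

# Sketch only: drives a MoveVirtualDisk_Task the same way the records
# above do, with hypothetical connection details and paths.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vc.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

disk_mgr = session.vim.service_content.virtualDiskManager
dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder moref

# Returns a Task moref immediately; the move runs server-side in vCenter.
task = session.invoke_api(
    session.vim, 'MoveVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] staging/staging.vmdk', sourceDatacenter=dc_ref,
    destName='[datastore2] image-cache/image.vmdk', destDatacenter=dc_ref,
    force=False)

# Polls the task on an interval (emitting the "progress is N%" DEBUG
# records) and returns the task info on success, raising on a fault.
session.wait_for_task(task)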
[ 1023.191192] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1b02a2-de2e-4b4a-aa3f-d16c2aaa8705 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.206097] env[62923]: DEBUG nova.compute.provider_tree [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.267133] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370403, 'name': PowerOffVM_Task, 'duration_secs': 0.920694} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.267832] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1023.267832] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance '1fef5eb2-acb0-4d00-81a3-c270af7df0e8' progress to 17 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1023.382473] env[62923]: DEBUG nova.network.neutron [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Successfully created port: bb561b4b-5c6a-4cc2-b404-07800286d632 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1023.489863] env[62923]: DEBUG nova.compute.manager [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1023.511883] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370402, 'name': MoveVirtualDisk_Task} progress is 63%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.709749] env[62923]: DEBUG nova.scheduler.client.report [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1023.777456] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1023.777724] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1023.777871] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1023.778068] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1023.778223] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1023.778376] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1023.778640] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 
tempest-ServerActionsTestJSON-1144686189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1023.778963] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1023.779112] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1023.779187] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1023.779332] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1023.784622] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-113feb07-532d-46ff-aa77-4414e8fba538 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.802541] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1023.802541] env[62923]: value = "task-1370404" [ 1023.802541] env[62923]: _type = "Task" [ 1023.802541] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.810944] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370404, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.012311] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370402, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.215954] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.736s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.218764] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.207s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.219024] env[62923]: DEBUG nova.objects.instance [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Lazy-loading 'resources' on Instance uuid d1026124-821b-44c1-b1f6-257597ce1195 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1024.242237] env[62923]: INFO nova.scheduler.client.report [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Deleted allocations for instance 63b16034-87f0-433f-b48c-0e936642534c [ 1024.316945] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370404, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.499951] env[62923]: DEBUG nova.compute.manager [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1024.518061] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370402, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.531809] env[62923]: DEBUG nova.virt.hardware [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1024.536423] env[62923]: DEBUG nova.virt.hardware [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1024.536772] env[62923]: DEBUG nova.virt.hardware [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1024.536866] env[62923]: DEBUG nova.virt.hardware [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1024.536975] env[62923]: DEBUG nova.virt.hardware [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1024.537222] env[62923]: DEBUG nova.virt.hardware [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1024.537477] env[62923]: DEBUG nova.virt.hardware [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1024.537646] env[62923]: DEBUG nova.virt.hardware [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1024.537816] 
env[62923]: DEBUG nova.virt.hardware [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1024.537982] env[62923]: DEBUG nova.virt.hardware [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1024.538275] env[62923]: DEBUG nova.virt.hardware [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1024.539125] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94b9563-1330-4930-b114-1e95f8f709fa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.546800] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1318ec70-390a-4d00-bcd8-bf5d05168df7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.756847] env[62923]: DEBUG oslo_concurrency.lockutils [None req-09bba434-028a-4cf4-8519-655730459b55 tempest-AttachInterfacesV270Test-276562382 tempest-AttachInterfacesV270Test-276562382-project-member] Lock "63b16034-87f0-433f-b48c-0e936642534c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.704s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.814780] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370404, 'name': ReconfigVM_Task, 'duration_secs': 0.71382} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.818073] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance '1fef5eb2-acb0-4d00-81a3-c270af7df0e8' progress to 33 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1024.859830] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1836dc-197c-4baa-8344-49791729aeea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.867654] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049207e9-19ae-400d-b285-ca3451bd5a6b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.898300] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7db409a-d742-4c2b-b0ce-90e9148fa151 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.906446] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1662a6fd-6c22-4a35-b640-7ad94aab6a80 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.922177] env[62923]: DEBUG nova.compute.provider_tree [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1025.014427] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370402, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.521314} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.014768] env[62923]: INFO nova.virt.vmwareapi.ds_util [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_7b02f8e9-e65e-4887-9d80-2d448f6b81a7/OSTACK_IMG_7b02f8e9-e65e-4887-9d80-2d448f6b81a7.vmdk to [datastore2] devstack-image-cache_base/51961a0b-7b31-4a1e-b2f1-1914040134b5/51961a0b-7b31-4a1e-b2f1-1914040134b5.vmdk. 
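Taken with the records that follow, this is the image-cache promotion sequence: the image was first fetched into a throwaway OSTACK_IMG_* staging directory, the staged vmdk is moved into devstack-image-cache_base keyed by image id, the staging directory is deleted, and the cached disk is then copied out into the instance's own directory. A condensed sketch of that sequence against the same vSphere task APIs; it assumes a connected oslo.vmware session and datacenter moref as in the previous sketch, and the helper composition and paths are illustrative, not nova's exact internals.

# Sketch: promote a staged disk into the image cache, clean up the staging
# directory, then copy the cached disk out for one instance. `session` and
# `dc_ref` are as in the earlier sketch.
def cache_and_copy(session, dc_ref, staged_vmdk, cached_vmdk, instance_vmdk):
    vdm = session.vim.service_content.virtualDiskManager
    fm = session.vim.service_content.fileManager

    # 1. Move the staged disk into the shared cache (MoveVirtualDisk_Task,
    #    the task polled from 0% to 100% above).
    session.wait_for_task(session.invoke_api(
        session.vim, 'MoveVirtualDisk_Task', vdm,
        sourceName=staged_vmdk, sourceDatacenter=dc_ref,
        destName=cached_vmdk, destDatacenter=dc_ref, force=False))

    # 2. Delete the now-empty staging directory (DeleteDatastoreFile_Task).
    staging_dir = staged_vmdk.rsplit('/', 1)[0]
    session.wait_for_task(session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', fm,
        name=staging_dir, datacenter=dc_ref))

    # 3. Copy the cached disk into the instance directory
    #    (CopyVirtualDisk_Task); the cache entry stays for later spawns.
    session.wait_for_task(session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName=cached_vmdk, sourceDatacenter=dc_ref,
        destName=instance_vmdk, destDatacenter=dc_ref))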
[ 1025.014994] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Cleaning up location [datastore2] OSTACK_IMG_7b02f8e9-e65e-4887-9d80-2d448f6b81a7 {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1025.015231] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_7b02f8e9-e65e-4887-9d80-2d448f6b81a7 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1025.015563] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e89a4f2-7b77-4232-b8a6-1c29cc7a0593 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.022774] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1025.022774] env[62923]: value = "task-1370405" [ 1025.022774] env[62923]: _type = "Task" [ 1025.022774] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.032741] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370405, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.078023] env[62923]: DEBUG nova.compute.manager [req-43295bf4-e1de-4abe-b523-9539db3e7548 req-ff549f2a-8086-41b3-a9f0-fe31d48b98d5 service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Received event network-vif-plugged-bb561b4b-5c6a-4cc2-b404-07800286d632 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1025.078265] env[62923]: DEBUG oslo_concurrency.lockutils [req-43295bf4-e1de-4abe-b523-9539db3e7548 req-ff549f2a-8086-41b3-a9f0-fe31d48b98d5 service nova] Acquiring lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.078473] env[62923]: DEBUG oslo_concurrency.lockutils [req-43295bf4-e1de-4abe-b523-9539db3e7548 req-ff549f2a-8086-41b3-a9f0-fe31d48b98d5 service nova] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.078654] env[62923]: DEBUG oslo_concurrency.lockutils [req-43295bf4-e1de-4abe-b523-9539db3e7548 req-ff549f2a-8086-41b3-a9f0-fe31d48b98d5 service nova] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.078818] env[62923]: DEBUG nova.compute.manager [req-43295bf4-e1de-4abe-b523-9539db3e7548 req-ff549f2a-8086-41b3-a9f0-fe31d48b98d5 service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] No waiting events found dispatching network-vif-plugged-bb561b4b-5c6a-4cc2-b404-07800286d632 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1025.078981] env[62923]: WARNING nova.compute.manager [req-43295bf4-e1de-4abe-b523-9539db3e7548 req-ff549f2a-8086-41b3-a9f0-fe31d48b98d5 service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Received unexpected event network-vif-plugged-bb561b4b-5c6a-4cc2-b404-07800286d632 for instance with vm_state building and task_state spawning. 
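The warning above is the receive side of nova's external-event handshake: Neutron reports network-vif-plugged for the new port, but nothing is registered to wait for it yet, so the manager logs it as unexpected and drops it, which is harmless while the instance is still building. On the waiting side, a virt driver registers interest before plugging the VIF and then blocks until the event lands or the deadline passes. A paraphrased sketch of that pattern; `virtapi` is a nova ComputeVirtAPI handle, and `plug_fn` is a hypothetical callable that performs the actual plug.

# Sketch of nova's wait_for_instance_event pattern, paraphrased; names
# other than wait_for_instance_event itself are illustrative.
def plug_and_wait(virtapi, instance, vif_id, plug_fn, timeout=300):
    events = [('network-vif-plugged', vif_id)]
    # Register interest first so the event cannot race the plug; the body
    # of the context manager triggers the operation that produces it. If
    # no matching event arrives before `deadline`, nova invokes an error
    # callback (which raises by default).
    with virtapi.wait_for_instance_event(instance, events, deadline=timeout):
        plug_fn()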
[ 1025.324019] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1025.324319] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1025.324481] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1025.324662] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1025.324808] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1025.324955] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1025.325383] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1025.325567] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1025.325747] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 
1025.325916] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1025.326327] env[62923]: DEBUG nova.virt.hardware [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1025.332298] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Reconfiguring VM instance instance-00000041 to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1025.332580] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0657b0ba-a35f-42ac-b852-b4dfafec21d2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.352400] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1025.352400] env[62923]: value = "task-1370406" [ 1025.352400] env[62923]: _type = "Task" [ 1025.352400] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.360830] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370406, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.389523] env[62923]: DEBUG nova.network.neutron [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Successfully updated port: bb561b4b-5c6a-4cc2-b404-07800286d632 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1025.460154] env[62923]: DEBUG nova.scheduler.client.report [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 132 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1025.460513] env[62923]: DEBUG nova.compute.provider_tree [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 132 to 133 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1025.460721] env[62923]: DEBUG nova.compute.provider_tree [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1025.532640] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370405, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043166} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.532983] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1025.533125] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "[datastore2] devstack-image-cache_base/51961a0b-7b31-4a1e-b2f1-1914040134b5/51961a0b-7b31-4a1e-b2f1-1914040134b5.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.533397] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/51961a0b-7b31-4a1e-b2f1-1914040134b5/51961a0b-7b31-4a1e-b2f1-1914040134b5.vmdk to [datastore2] 75f9473f-ca67-4bb5-8663-0ce3709885e9/75f9473f-ca67-4bb5-8663-0ce3709885e9.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1025.533665] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f350716-04ab-4c99-933f-368022b61739 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.539643] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1025.539643] env[62923]: value = "task-1370407" [ 1025.539643] env[62923]: _type = "Task" [ 1025.539643] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.547158] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370407, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.862765] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370406, 'name': ReconfigVM_Task, 'duration_secs': 0.154377} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.863327] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Reconfigured VM instance instance-00000041 to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1025.864462] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4854020d-6a3f-4d88-ad37-63c216ba7e1e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.887245] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 1fef5eb2-acb0-4d00-81a3-c270af7df0e8/1fef5eb2-acb0-4d00-81a3-c270af7df0e8.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1025.887625] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c9f99e3-5fce-4aee-8914-c526224024dc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.902568] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "refresh_cache-aae1a2a3-57da-4846-8240-ac0626e9ebd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.902713] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired lock "refresh_cache-aae1a2a3-57da-4846-8240-ac0626e9ebd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.902907] env[62923]: DEBUG nova.network.neutron [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1025.909176] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1025.909176] env[62923]: value = "task-1370408" [ 1025.909176] env[62923]: _type = "Task" [ 1025.909176] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.921137] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370408, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.967828] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.749s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.005355] env[62923]: INFO nova.scheduler.client.report [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Deleted allocations for instance d1026124-821b-44c1-b1f6-257597ce1195 [ 1026.052764] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370407, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.420224] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370408, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.441338] env[62923]: DEBUG nova.network.neutron [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1026.515835] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b6de394a-b25c-44a7-b212-edcd776e8051 tempest-ServersAaction247Test-328267384 tempest-ServersAaction247Test-328267384-project-member] Lock "d1026124-821b-44c1-b1f6-257597ce1195" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.179s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.557802] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370407, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.660939] env[62923]: DEBUG nova.network.neutron [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Updating instance_info_cache with network_info: [{"id": "bb561b4b-5c6a-4cc2-b404-07800286d632", "address": "fa:16:3e:f9:b5:7d", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb561b4b-5c", "ovs_interfaceid": "bb561b4b-5c6a-4cc2-b404-07800286d632", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.921385] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370408, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.052869] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370407, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.116756] env[62923]: DEBUG nova.compute.manager [req-6cb1b515-b476-4e0e-9f45-ff0ed8774fae req-be865c6c-1da7-4552-a3af-287a495dde1e service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Received event network-changed-bb561b4b-5c6a-4cc2-b404-07800286d632 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1027.117245] env[62923]: DEBUG nova.compute.manager [req-6cb1b515-b476-4e0e-9f45-ff0ed8774fae req-be865c6c-1da7-4552-a3af-287a495dde1e service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Refreshing instance network info cache due to event network-changed-bb561b4b-5c6a-4cc2-b404-07800286d632. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1027.117245] env[62923]: DEBUG oslo_concurrency.lockutils [req-6cb1b515-b476-4e0e-9f45-ff0ed8774fae req-be865c6c-1da7-4552-a3af-287a495dde1e service nova] Acquiring lock "refresh_cache-aae1a2a3-57da-4846-8240-ac0626e9ebd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.165376] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Releasing lock "refresh_cache-aae1a2a3-57da-4846-8240-ac0626e9ebd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.165841] env[62923]: DEBUG nova.compute.manager [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Instance network_info: |[{"id": "bb561b4b-5c6a-4cc2-b404-07800286d632", "address": "fa:16:3e:f9:b5:7d", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb561b4b-5c", "ovs_interfaceid": "bb561b4b-5c6a-4cc2-b404-07800286d632", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1027.166086] env[62923]: DEBUG oslo_concurrency.lockutils [req-6cb1b515-b476-4e0e-9f45-ff0ed8774fae req-be865c6c-1da7-4552-a3af-287a495dde1e service nova] Acquired lock "refresh_cache-aae1a2a3-57da-4846-8240-ac0626e9ebd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.166339] env[62923]: DEBUG nova.network.neutron [req-6cb1b515-b476-4e0e-9f45-ff0ed8774fae req-be865c6c-1da7-4552-a3af-287a495dde1e service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Refreshing network info cache for port bb561b4b-5c6a-4cc2-b404-07800286d632 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1027.168300] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:b5:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a78d5760-0bb1-4476-9578-8ad3c3144439', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'bb561b4b-5c6a-4cc2-b404-07800286d632', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1027.181846] env[62923]: DEBUG oslo.service.loopingcall [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1027.185589] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1027.186696] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14c76a94-3d7f-4ab9-8a99-cc502eeb3831 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.209116] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1027.209116] env[62923]: value = "task-1370409" [ 1027.209116] env[62923]: _type = "Task" [ 1027.209116] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.218822] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370409, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.420268] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370408, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.552157] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370407, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.691689] env[62923]: DEBUG nova.network.neutron [req-6cb1b515-b476-4e0e-9f45-ff0ed8774fae req-be865c6c-1da7-4552-a3af-287a495dde1e service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Updated VIF entry in instance network info cache for port bb561b4b-5c6a-4cc2-b404-07800286d632. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1027.691689] env[62923]: DEBUG nova.network.neutron [req-6cb1b515-b476-4e0e-9f45-ff0ed8774fae req-be865c6c-1da7-4552-a3af-287a495dde1e service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Updating instance_info_cache with network_info: [{"id": "bb561b4b-5c6a-4cc2-b404-07800286d632", "address": "fa:16:3e:f9:b5:7d", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb561b4b-5c", "ovs_interfaceid": "bb561b4b-5c6a-4cc2-b404-07800286d632", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.723837] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370409, 'name': CreateVM_Task, 'duration_secs': 0.358278} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.724288] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1027.725140] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.725469] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.725913] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1027.726376] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-409a1c6f-d017-4394-bc58-f3619765ff0f {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.732224] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1027.732224] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b60670-9539-c780-c087-d6412da6bb6f" [ 1027.732224] env[62923]: _type = "Task" [ 1027.732224] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.741810] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b60670-9539-c780-c087-d6412da6bb6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.921841] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370408, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.052520] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370407, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.193240] env[62923]: DEBUG oslo_concurrency.lockutils [req-6cb1b515-b476-4e0e-9f45-ff0ed8774fae req-be865c6c-1da7-4552-a3af-287a495dde1e service nova] Releasing lock "refresh_cache-aae1a2a3-57da-4846-8240-ac0626e9ebd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.244749] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b60670-9539-c780-c087-d6412da6bb6f, 'name': SearchDatastore_Task, 'duration_secs': 0.012926} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.245176] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.245420] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1028.245655] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.245868] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.246098] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1028.246408] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d9ba1f0-222c-4c68-a425-5879a87317de {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.254437] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1028.255134] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1028.255350] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3594ba02-a7cd-49ae-87ab-a1172d9ddfb4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.261418] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1028.261418] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a20369-fbf1-2d15-932b-6180d4729290" [ 1028.261418] env[62923]: _type = "Task" [ 1028.261418] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.272095] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a20369-fbf1-2d15-932b-6180d4729290, 'name': SearchDatastore_Task, 'duration_secs': 0.009012} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.272897] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d88e7f98-3058-47af-970a-62c953714c7a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.278057] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1028.278057] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52903806-4f46-e347-01c9-bd1f128293d5" [ 1028.278057] env[62923]: _type = "Task" [ 1028.278057] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.286269] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52903806-4f46-e347-01c9-bd1f128293d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.422254] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370408, 'name': ReconfigVM_Task, 'duration_secs': 2.368424} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.422554] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 1fef5eb2-acb0-4d00-81a3-c270af7df0e8/1fef5eb2-acb0-4d00-81a3-c270af7df0e8.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1028.422829] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance '1fef5eb2-acb0-4d00-81a3-c270af7df0e8' progress to 50 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1028.552844] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370407, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.59022} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.553092] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/51961a0b-7b31-4a1e-b2f1-1914040134b5/51961a0b-7b31-4a1e-b2f1-1914040134b5.vmdk to [datastore2] 75f9473f-ca67-4bb5-8663-0ce3709885e9/75f9473f-ca67-4bb5-8663-0ce3709885e9.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1028.553878] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ba99df-2ca7-4c66-84a0-36433672f6ae {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.575282] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 75f9473f-ca67-4bb5-8663-0ce3709885e9/75f9473f-ca67-4bb5-8663-0ce3709885e9.vmdk or device None with type streamOptimized {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1028.576327] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f28824d8-7df7-423f-96c4-4f5798ee1654 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.596148] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1028.596148] env[62923]: value = "task-1370410" [ 1028.596148] env[62923]: _type = "Task" [ 1028.596148] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.603981] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370410, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.791153] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52903806-4f46-e347-01c9-bd1f128293d5, 'name': SearchDatastore_Task, 'duration_secs': 0.007768} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.791153] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.791153] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] aae1a2a3-57da-4846-8240-ac0626e9ebd8/aae1a2a3-57da-4846-8240-ac0626e9ebd8.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1028.791153] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d45b58c-d7b3-4132-8d08-b0fb458f25e2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.799029] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1028.799029] env[62923]: value = "task-1370411" [ 1028.799029] env[62923]: _type = "Task" [ 1028.799029] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.809797] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370411, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.931241] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d56b39-2504-4111-a196-5ad08d872e24 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.957367] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d54713-e949-46f7-b139-5afe3f587203 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.981958] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance '1fef5eb2-acb0-4d00-81a3-c270af7df0e8' progress to 67 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1029.107954] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370410, 'name': ReconfigVM_Task, 'duration_secs': 0.272342} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.110776] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 75f9473f-ca67-4bb5-8663-0ce3709885e9/75f9473f-ca67-4bb5-8663-0ce3709885e9.vmdk or device None with type streamOptimized {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1029.110776] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_format': None, 'size': 0, 'encryption_secret_uuid': None, 'device_type': 'disk', 'guest_format': None, 'encrypted': False, 'boot_index': 0, 'encryption_options': None, 'device_name': '/dev/sda', 'disk_bus': None, 'image_id': 'cd84cf13-77b9-4bc1-bb15-31bece605a8e'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291523', 'volume_id': '7c87212b-d640-4716-ace2-030c4b7ed621', 'name': 'volume-7c87212b-d640-4716-ace2-030c4b7ed621', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '75f9473f-ca67-4bb5-8663-0ce3709885e9', 'attached_at': '', 'detached_at': '', 'volume_id': '7c87212b-d640-4716-ace2-030c4b7ed621', 'serial': '7c87212b-d640-4716-ace2-030c4b7ed621'}, 'mount_device': '/dev/sdb', 'attachment_id': '6c5a7691-dca0-42c6-ab9f-9ffae3a77180', 'guest_format': None, 'disk_bus': None, 'delete_on_termination': False, 'boot_index': None, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=62923) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1029.110776] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Volume attach. Driver type: vmdk {{(pid=62923) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1029.110776] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291523', 'volume_id': '7c87212b-d640-4716-ace2-030c4b7ed621', 'name': 'volume-7c87212b-d640-4716-ace2-030c4b7ed621', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '75f9473f-ca67-4bb5-8663-0ce3709885e9', 'attached_at': '', 'detached_at': '', 'volume_id': '7c87212b-d640-4716-ace2-030c4b7ed621', 'serial': '7c87212b-d640-4716-ace2-030c4b7ed621'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1029.111767] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d572cf48-e4fb-4317-9d8f-1aa8ace5bdb2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.129840] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f52f1f-a88c-46c5-85c2-a7fb639ae6bf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.163839] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] volume-7c87212b-d640-4716-ace2-030c4b7ed621/volume-7c87212b-d640-4716-ace2-030c4b7ed621.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1029.164120] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42b3c66c-c8b2-4e9a-9e36-39e0ea370fee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.190677] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1029.190677] env[62923]: value = "task-1370412" [ 1029.190677] env[62923]: _type = "Task" [ 1029.190677] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.202477] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370412, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.309869] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370411, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.530872] env[62923]: DEBUG nova.network.neutron [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Port 1fb7d101-34b0-45db-b473-84c94e4b9aaa binding to destination host cpu-1 is already ACTIVE {{(pid=62923) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1029.703278] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370412, 'name': ReconfigVM_Task, 'duration_secs': 0.381179} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.703677] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Reconfigured VM instance instance-00000057 to attach disk [datastore2] volume-7c87212b-d640-4716-ace2-030c4b7ed621/volume-7c87212b-d640-4716-ace2-030c4b7ed621.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1029.708485] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19c6045c-8ff9-41e7-97c7-a351c5cde992 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.723064] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1029.723064] env[62923]: value = "task-1370413" [ 1029.723064] env[62923]: _type = "Task" [ 1029.723064] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.732539] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370413, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.808669] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370411, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544321} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.810045] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] aae1a2a3-57da-4846-8240-ac0626e9ebd8/aae1a2a3-57da-4846-8240-ac0626e9ebd8.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1029.810045] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1029.810214] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b3a23a4a-35d1-4f00-b5ea-76c387d7122d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.818712] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1029.818712] env[62923]: value = "task-1370414" [ 1029.818712] env[62923]: _type = "Task" [ 1029.818712] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.827429] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370414, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.233654] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370413, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.331022] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370414, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077669} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.331022] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1030.331022] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130cbc1b-13aa-4a32-88da-e4be0c3d21c3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.353501] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "62889af3-06e9-4f5e-9ab0-87024e0678ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.353756] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "62889af3-06e9-4f5e-9ab0-87024e0678ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.366719] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] aae1a2a3-57da-4846-8240-ac0626e9ebd8/aae1a2a3-57da-4846-8240-ac0626e9ebd8.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1030.369035] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4cc4cf88-e7ba-4f0a-b5d9-1beadf1bb6b9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.387431] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "19e75201-8918-4b27-928b-633849222daf" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.387688] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "19e75201-8918-4b27-928b-633849222daf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.394901] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 
tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1030.394901] env[62923]: value = "task-1370415" [ 1030.394901] env[62923]: _type = "Task" [ 1030.394901] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.405390] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370415, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.547952] env[62923]: DEBUG oslo_concurrency.lockutils [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.548218] env[62923]: DEBUG oslo_concurrency.lockutils [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.548387] env[62923]: DEBUG oslo_concurrency.lockutils [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.743833] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370413, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.773118] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.773118] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.856718] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1030.892954] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1030.906761] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370415, 'name': ReconfigVM_Task, 'duration_secs': 0.29786} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.907241] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Reconfigured VM instance instance-00000065 to attach disk [datastore1] aae1a2a3-57da-4846-8240-ac0626e9ebd8/aae1a2a3-57da-4846-8240-ac0626e9ebd8.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1030.908096] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58543ca4-606c-42cd-ad3f-d230deaefb80 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.915043] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1030.915043] env[62923]: value = "task-1370416" [ 1030.915043] env[62923]: _type = "Task" [ 1030.915043] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.927065] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370416, 'name': Rename_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.237106] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370413, 'name': ReconfigVM_Task, 'duration_secs': 1.143454} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.237979] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291523', 'volume_id': '7c87212b-d640-4716-ace2-030c4b7ed621', 'name': 'volume-7c87212b-d640-4716-ace2-030c4b7ed621', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '75f9473f-ca67-4bb5-8663-0ce3709885e9', 'attached_at': '', 'detached_at': '', 'volume_id': '7c87212b-d640-4716-ace2-030c4b7ed621', 'serial': '7c87212b-d640-4716-ace2-030c4b7ed621'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1031.239478] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf12b85b-f28f-4b98-a366-8a92fb080ffe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.247347] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1031.247347] env[62923]: value = "task-1370417" [ 1031.247347] env[62923]: _type = "Task" [ 1031.247347] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.257400] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370417, 'name': Rename_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.278023] env[62923]: DEBUG nova.compute.utils [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1031.386286] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.386286] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.386621] env[62923]: INFO nova.compute.claims [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1031.426272] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.432087] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370416, 'name': Rename_Task, 'duration_secs': 0.125939} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.432380] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1031.432642] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50d9a1f3-3607-4b75-afeb-9d4e06750a3d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.438967] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1031.438967] env[62923]: value = "task-1370418" [ 1031.438967] env[62923]: _type = "Task" [ 1031.438967] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.447192] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370418, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.599755] env[62923]: DEBUG oslo_concurrency.lockutils [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.599755] env[62923]: DEBUG oslo_concurrency.lockutils [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.599846] env[62923]: DEBUG nova.network.neutron [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.761908] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370417, 'name': Rename_Task, 'duration_secs': 0.154433} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.766358] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1031.766670] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-040319a7-11ce-4ec5-bf8d-e770df4d0c9b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.773107] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1031.773107] env[62923]: value = "task-1370419" [ 1031.773107] env[62923]: _type = "Task" [ 1031.773107] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.779751] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.007s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.786304] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370419, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.951319] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370418, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.288969] env[62923]: DEBUG oslo_vmware.api [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370419, 'name': PowerOnVM_Task, 'duration_secs': 0.481883} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.289413] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1032.387238] env[62923]: DEBUG nova.network.neutron [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance_info_cache with network_info: [{"id": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "address": "fa:16:3e:38:95:72", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb7d101-34", "ovs_interfaceid": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.452808] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370418, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.515639] env[62923]: DEBUG nova.compute.manager [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1032.516622] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a06b4b8-ba79-4ddf-be76-cf545c1049ea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.575940] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bf65c1-6635-443c-8523-baa5c989d26b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.583180] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6c7578-9860-4f6d-b569-301586795006 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.615958] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631a0590-3d6c-4d31-9616-6a3c32c22db6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.622246] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b9d435-bc2f-45c9-b730-84b58e95b7e5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.637923] env[62923]: DEBUG nova.compute.provider_tree [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1032.890827] env[62923]: DEBUG oslo_concurrency.lockutils [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.943906] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 
tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.943906] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.943906] env[62923]: INFO nova.compute.manager [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Attaching volume 46d4817c-b1e2-4f6b-a75d-5b24a3acd626 to /dev/sdb [ 1032.962150] env[62923]: DEBUG oslo_vmware.api [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370418, 'name': PowerOnVM_Task, 'duration_secs': 1.204692} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.963410] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1032.963410] env[62923]: INFO nova.compute.manager [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Took 8.46 seconds to spawn the instance on the hypervisor. 
[ 1032.963500] env[62923]: DEBUG nova.compute.manager [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1032.964986] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a3bb4d-50da-40a4-85eb-6e51cadf6e60 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.035483] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5566b5-c9b4-415a-911f-d5cc13adb9fa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.039863] env[62923]: DEBUG oslo_concurrency.lockutils [None req-14418e80-e852-4dff-b51b-551e9ca2d91f tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" "released" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: held 30.225s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.044533] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57707970-b0d3-42cd-9417-f36e836b8c44 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.057650] env[62923]: DEBUG nova.virt.block_device [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Updating existing volume attachment record: 91464bcb-3f68-4623-ba1c-d4136dd8c501 {{(pid=62923) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1033.168800] env[62923]: ERROR nova.scheduler.client.report [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [req-b83535e4-6b22-468a-be74-e1293c889eaf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b83535e4-6b22-468a-be74-e1293c889eaf"}]} [ 1033.192170] env[62923]: DEBUG nova.scheduler.client.report [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1033.211117] env[62923]: DEBUG nova.scheduler.client.report [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1033.212216] env[62923]: DEBUG nova.compute.provider_tree [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1033.232151] env[62923]: DEBUG nova.scheduler.client.report [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1033.232151] env[62923]: DEBUG nova.compute.provider_tree [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 135 to 136 during operation: update_aggregates {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1033.250663] env[62923]: DEBUG nova.scheduler.client.report [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1033.388173] 
env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abd6dbe-da17-414a-8077-39cb9753438f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.395278] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8609dfde-d615-4fec-9d3f-4124cf80202d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.433277] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b437f679-091e-4290-bb27-16ae93e276f5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.436427] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090c5945-da0d-4dc6-9ddf-29292c0ada3d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.456465] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23a8157-79dd-4213-ac98-9707c0364a57 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.461060] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7728c96-ee7c-4ef7-89d6-aa2ba36235b0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.473379] env[62923]: DEBUG nova.compute.provider_tree [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1033.478257] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance '1fef5eb2-acb0-4d00-81a3-c270af7df0e8' progress to 83 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1033.494723] env[62923]: INFO nova.compute.manager [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Took 16.53 seconds to build instance. 
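[annotation] The ERROR above ("Got 409 ... resource provider generation conflict ... placement.concurrent_update") followed by the refresh of provider a513b783-544c-421b-85ec-cfd6d6ee698d from generation 135 to 136 and then 137 shows Placement's optimistic concurrency control: every inventory write carries the provider generation it was computed against, a stale generation is rejected with 409, and the client must re-read and retry. A self-contained simulation of that compare-and-swap loop; the in-memory dict stands in for the Placement service itself, so only the retry shape mirrors the log, not the real HTTP API:

```python
# Simulation of the generation-guarded inventory update behind the 409
# above; _PROVIDERS is a stand-in for Placement, not the real service.
class ConcurrentUpdate(Exception):
    """Stands in for HTTP 409 placement.concurrent_update."""


_PROVIDERS = {"a513b783-544c-421b-85ec-cfd6d6ee698d": {"generation": 135, "inventory": {}}}


def put_inventory(uuid, generation, inventory):
    provider = _PROVIDERS[uuid]
    if generation != provider["generation"]:
        raise ConcurrentUpdate()  # stale generation: caller must re-read
    provider["inventory"] = inventory
    provider["generation"] += 1  # success bumps the generation, e.g. 136 -> 137


def set_inventory(uuid, inventory, max_attempts=4):
    for _ in range(max_attempts):
        generation = _PROVIDERS[uuid]["generation"]  # refresh, as in 135 -> 136
        try:
            put_inventory(uuid, generation, inventory)
            return True
        except ConcurrentUpdate:
            continue  # another writer won the race; retry with a fresh generation
    return False


if __name__ == "__main__":
    ok = set_inventory("a513b783-544c-421b-85ec-cfd6d6ee698d",
                       {"VCPU": {"total": 48, "allocation_ratio": 4.0}})
    print(ok, _PROVIDERS["a513b783-544c-421b-85ec-cfd6d6ee698d"]["generation"])
```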
[ 1033.782962] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Acquiring lock "996cb68a-4a18-488d-890f-ace24dcd4c42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.783218] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Lock "996cb68a-4a18-488d-890f-ace24dcd4c42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.988772] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1033.989129] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfd92829-9e1c-4f51-8a1b-5c69b2309929 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.996349] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62058043-6da8-4a18-a4e3-67fbc460a869 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.043s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.996690] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1033.996690] env[62923]: value = "task-1370423" [ 1033.996690] env[62923]: _type = "Task" [ 1033.996690] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.004979] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370423, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.030936] env[62923]: DEBUG nova.scheduler.client.report [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 136 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1034.031124] env[62923]: DEBUG nova.compute.provider_tree [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 136 to 137 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1034.031315] env[62923]: DEBUG nova.compute.provider_tree [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1034.286307] env[62923]: DEBUG nova.compute.manager [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1034.506471] env[62923]: DEBUG oslo_vmware.api [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370423, 'name': PowerOnVM_Task, 'duration_secs': 0.489494} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.506756] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1034.506952] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-407982ec-c8ba-4a12-b32a-20635ab90773 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance '1fef5eb2-acb0-4d00-81a3-c270af7df0e8' progress to 100 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1034.537976] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.154s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.538521] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1034.542292] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.117s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.546230] env[62923]: INFO nova.compute.claims [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1034.746197] env[62923]: DEBUG nova.compute.manager [req-3d640cf4-687b-4d3d-8685-27f6621fc36b req-51553e3c-d6ef-466a-a822-3f430849ec43 service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Received event network-changed-bb561b4b-5c6a-4cc2-b404-07800286d632 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1034.746379] env[62923]: DEBUG nova.compute.manager [req-3d640cf4-687b-4d3d-8685-27f6621fc36b req-51553e3c-d6ef-466a-a822-3f430849ec43 service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Refreshing instance network info cache due to event network-changed-bb561b4b-5c6a-4cc2-b404-07800286d632. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1034.746795] env[62923]: DEBUG oslo_concurrency.lockutils [req-3d640cf4-687b-4d3d-8685-27f6621fc36b req-51553e3c-d6ef-466a-a822-3f430849ec43 service nova] Acquiring lock "refresh_cache-aae1a2a3-57da-4846-8240-ac0626e9ebd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.747024] env[62923]: DEBUG oslo_concurrency.lockutils [req-3d640cf4-687b-4d3d-8685-27f6621fc36b req-51553e3c-d6ef-466a-a822-3f430849ec43 service nova] Acquired lock "refresh_cache-aae1a2a3-57da-4846-8240-ac0626e9ebd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.749831] env[62923]: DEBUG nova.network.neutron [req-3d640cf4-687b-4d3d-8685-27f6621fc36b req-51553e3c-d6ef-466a-a822-3f430849ec43 service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Refreshing network info cache for port bb561b4b-5c6a-4cc2-b404-07800286d632 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1034.885104] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.052622] env[62923]: DEBUG nova.compute.utils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1035.054290] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1035.062467] env[62923]: DEBUG nova.network.neutron [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1035.127192] env[62923]: DEBUG nova.policy [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '715bac2639c841dba876a5c1b74e6ade', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a7373f1d735a4f51a8524e0aa4b39b50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 1035.516165] env[62923]: DEBUG nova.network.neutron [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Successfully created port: 3318496a-5cdc-4d39-a09b-2cbe7691b4f3 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1035.538294] env[62923]: DEBUG nova.network.neutron [req-3d640cf4-687b-4d3d-8685-27f6621fc36b req-51553e3c-d6ef-466a-a822-3f430849ec43 service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Updated VIF entry in instance network info cache for port bb561b4b-5c6a-4cc2-b404-07800286d632. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1035.538783] env[62923]: DEBUG nova.network.neutron [req-3d640cf4-687b-4d3d-8685-27f6621fc36b req-51553e3c-d6ef-466a-a822-3f430849ec43 service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Updating instance_info_cache with network_info: [{"id": "bb561b4b-5c6a-4cc2-b404-07800286d632", "address": "fa:16:3e:f9:b5:7d", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb561b4b-5c", "ovs_interfaceid": "bb561b4b-5c6a-4cc2-b404-07800286d632", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.563893] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1035.731816] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33923c50-9ed5-4d6f-add7-0dab2d7c3eb7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.740638] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b4bb3b-9def-48e0-9825-4054f5d586ed {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.774890] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acca3e9-0701-463e-b74b-7676b1d03617 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.783440] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3057b6-1288-4b7b-a903-14c0930fba47 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.797069] env[62923]: DEBUG nova.compute.provider_tree [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.041620] env[62923]: DEBUG oslo_concurrency.lockutils [req-3d640cf4-687b-4d3d-8685-27f6621fc36b req-51553e3c-d6ef-466a-a822-3f430849ec43 service nova] Releasing lock "refresh_cache-aae1a2a3-57da-4846-8240-ac0626e9ebd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.300166] env[62923]: DEBUG nova.scheduler.client.report [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1036.576352] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1036.626796] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1036.627060] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1036.627227] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1036.627474] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1036.627640] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1036.627831] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1036.628096] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1036.628286] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1036.628502] env[62923]: DEBUG 
nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1036.628693] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1036.628903] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1036.630166] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257b8129-56d6-4a43-aeb2-293840725911 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.638334] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8692ee-78ed-42d9-96cb-b5a730adabdb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.806312] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.264s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.806904] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1036.809641] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.978s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.811080] env[62923]: INFO nova.compute.claims [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1037.006405] env[62923]: DEBUG nova.compute.manager [req-116a6b09-ba18-4712-ac79-ce8b7008a268 req-9198fa9b-429d-4ce7-b061-5aa056a0773b service nova] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Received event network-vif-plugged-3318496a-5cdc-4d39-a09b-2cbe7691b4f3 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1037.006655] env[62923]: DEBUG oslo_concurrency.lockutils [req-116a6b09-ba18-4712-ac79-ce8b7008a268 req-9198fa9b-429d-4ce7-b061-5aa056a0773b service nova] Acquiring lock "62889af3-06e9-4f5e-9ab0-87024e0678ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.006867] env[62923]: DEBUG oslo_concurrency.lockutils [req-116a6b09-ba18-4712-ac79-ce8b7008a268 req-9198fa9b-429d-4ce7-b061-5aa056a0773b service nova] Lock "62889af3-06e9-4f5e-9ab0-87024e0678ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.007321] env[62923]: DEBUG oslo_concurrency.lockutils [req-116a6b09-ba18-4712-ac79-ce8b7008a268 req-9198fa9b-429d-4ce7-b061-5aa056a0773b service nova] Lock "62889af3-06e9-4f5e-9ab0-87024e0678ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.007523] env[62923]: DEBUG nova.compute.manager [req-116a6b09-ba18-4712-ac79-ce8b7008a268 req-9198fa9b-429d-4ce7-b061-5aa056a0773b service nova] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] No waiting events found dispatching network-vif-plugged-3318496a-5cdc-4d39-a09b-2cbe7691b4f3 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1037.007699] env[62923]: WARNING nova.compute.manager [req-116a6b09-ba18-4712-ac79-ce8b7008a268 req-9198fa9b-429d-4ce7-b061-5aa056a0773b service nova] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Received unexpected event network-vif-plugged-3318496a-5cdc-4d39-a09b-2cbe7691b4f3 for instance with vm_state building and task_state spawning. 
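[annotation] The "No waiting events found dispatching network-vif-plugged-..." warning above records the external-event handshake between nova-compute and Neutron: the compute side registers a waiter per (instance, event), Neutron's callback pops and signals the matching waiter, and an event that arrives before any waiter is registered is logged as unexpected (harmless here, since the instance is still building). A hedged sketch of that handshake using plain threading primitives; the module-level dict and function names are illustrative stand-ins, not Nova's InstanceEvents classes:

```python
# Sketch of the expected-event handshake behind the
# "No waiting events found dispatching network-vif-plugged-..." warning.
# Pure threading stand-in for Nova's InstanceEvents machinery.
import threading

_waiters = {}          # (instance_uuid, event_name) -> threading.Event
_waiters_lock = threading.Lock()


def prepare_for_event(instance_uuid, event_name):
    ev = threading.Event()
    with _waiters_lock:
        _waiters[(instance_uuid, event_name)] = ev
    return ev


def dispatch_external_event(instance_uuid, event_name):
    with _waiters_lock:
        ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        # Nobody registered yet: this is the "unexpected event" case in the log.
        print("WARNING: unexpected event %s for %s" % (event_name, instance_uuid))
    else:
        ev.set()  # wake the thread blocked in wait_for_event()


def wait_for_event(ev, timeout=300):
    return ev.wait(timeout)  # False on timeout, i.e. the vif plug never arrived


if __name__ == "__main__":
    uuid = "62889af3-06e9-4f5e-9ab0-87024e0678ca"
    # An event arriving before anyone waits reproduces the log's warning:
    dispatch_external_event(uuid, "network-vif-plugged-3318496a")
    ev = prepare_for_event(uuid, "network-vif-plugged-3318496a")
    dispatch_external_event(uuid, "network-vif-plugged-3318496a")
    print("plugged:", wait_for_event(ev, timeout=1))
```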
[ 1037.066058] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.066058] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.066241] env[62923]: DEBUG nova.compute.manager [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Going to confirm migration 4 {{(pid=62923) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1037.084931] env[62923]: DEBUG nova.network.neutron [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Successfully updated port: 3318496a-5cdc-4d39-a09b-2cbe7691b4f3 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1037.318900] env[62923]: DEBUG nova.compute.utils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1037.321175] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1037.321277] env[62923]: DEBUG nova.network.neutron [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1037.367302] env[62923]: DEBUG nova.policy [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '715bac2639c841dba876a5c1b74e6ade', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a7373f1d735a4f51a8524e0aa4b39b50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 1037.588999] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "refresh_cache-62889af3-06e9-4f5e-9ab0-87024e0678ca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.589208] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired lock "refresh_cache-62889af3-06e9-4f5e-9ab0-87024e0678ca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.590041] env[62923]: DEBUG nova.network.neutron [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1037.615711] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Volume attach. 
Driver type: vmdk {{(pid=62923) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1037.615883] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291531', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'name': 'volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b459a438-c287-4fbd-80f5-b5d3c31b83c9', 'attached_at': '', 'detached_at': '', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'serial': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1037.616885] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce48d1c-b28e-4957-aeeb-93606b230767 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.634282] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6e1684-065c-4f93-890b-65c6b0b2e704 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.637721] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.637915] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.638079] env[62923]: DEBUG nova.network.neutron [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1037.638344] env[62923]: DEBUG nova.objects.instance [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lazy-loading 'info_cache' on Instance uuid 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.662116] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626/volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.663080] env[62923]: 
DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-851e2b84-bf6c-44c3-aea3-ff0619aae7d5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.676350] env[62923]: DEBUG nova.network.neutron [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Successfully created port: b25ffa6d-fad0-4ba5-a95d-854bbf97af8f {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1037.684440] env[62923]: DEBUG oslo_vmware.api [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1037.684440] env[62923]: value = "task-1370425" [ 1037.684440] env[62923]: _type = "Task" [ 1037.684440] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.693015] env[62923]: DEBUG oslo_vmware.api [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370425, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.821671] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1037.986034] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0f5b1e-4b15-4174-b236-ccd2f1d2b0c9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.993730] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0698517c-3cbb-4184-ac17-582b34d2fb80 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.025065] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d039ee-0f01-43e2-941a-e9c3fccfa7a9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.032850] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178428e9-fdda-4fc5-81e2-860385f42532 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.046686] env[62923]: DEBUG nova.compute.provider_tree [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.124311] env[62923]: DEBUG nova.network.neutron [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 
62889af3-06e9-4f5e-9ab0-87024e0678ca] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1038.196451] env[62923]: DEBUG oslo_vmware.api [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370425, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.260316] env[62923]: DEBUG nova.network.neutron [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Updating instance_info_cache with network_info: [{"id": "3318496a-5cdc-4d39-a09b-2cbe7691b4f3", "address": "fa:16:3e:11:78:cc", "network": {"id": "9f2d90b9-a510-4eab-b512-ae6b4edab14e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1167790952-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7373f1d735a4f51a8524e0aa4b39b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3318496a-5c", "ovs_interfaceid": "3318496a-5cdc-4d39-a09b-2cbe7691b4f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.549305] env[62923]: DEBUG nova.scheduler.client.report [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1038.695566] env[62923]: DEBUG oslo_vmware.api [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370425, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.762547] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Releasing lock "refresh_cache-62889af3-06e9-4f5e-9ab0-87024e0678ca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.762892] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Instance network_info: |[{"id": "3318496a-5cdc-4d39-a09b-2cbe7691b4f3", "address": "fa:16:3e:11:78:cc", "network": {"id": "9f2d90b9-a510-4eab-b512-ae6b4edab14e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1167790952-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7373f1d735a4f51a8524e0aa4b39b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3318496a-5c", "ovs_interfaceid": "3318496a-5cdc-4d39-a09b-2cbe7691b4f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1038.763338] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:78:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c47e98ff-83cf-48d2-bf91-2931c7386b6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3318496a-5cdc-4d39-a09b-2cbe7691b4f3', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1038.774785] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Creating folder: Project (a7373f1d735a4f51a8524e0aa4b39b50). Parent ref: group-v291405. 
{{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1038.778236] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8761686f-2c52-40c0-9dfa-043b7905ae8a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.790186] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Created folder: Project (a7373f1d735a4f51a8524e0aa4b39b50) in parent group-v291405. [ 1038.790186] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Creating folder: Instances. Parent ref: group-v291532. {{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1038.791042] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba782ae6-a725-45ac-b634-81340557d4a3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.801042] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Created folder: Instances in parent group-v291532. [ 1038.801042] env[62923]: DEBUG oslo.service.loopingcall [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1038.801042] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1038.801042] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6dd0334d-43d2-4ce3-acee-6aac35d992b3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.825596] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1038.825596] env[62923]: value = "task-1370428" [ 1038.825596] env[62923]: _type = "Task" [ 1038.825596] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.834260] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1038.836039] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370428, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.870498] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1038.870739] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1038.870904] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1038.871107] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1038.871263] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1038.871416] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1038.871618] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1038.871775] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1038.871939] env[62923]: DEBUG 
nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1038.872112] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1038.872289] env[62923]: DEBUG nova.virt.hardware [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1038.873157] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34723d6-1355-4fdb-9d6f-0c44c8a11ce2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.880888] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f86c4a-5213-48c4-af70-2f2459656cde {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.924467] env[62923]: DEBUG nova.network.neutron [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance_info_cache with network_info: [{"id": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "address": "fa:16:3e:38:95:72", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb7d101-34", "ovs_interfaceid": "1fb7d101-34b0-45db-b473-84c94e4b9aaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.033363] env[62923]: DEBUG nova.compute.manager [req-07ce4c2b-3113-416c-b878-6e3b4233c24c req-5965cc9b-aadb-4aab-96ff-c5afaccf1a58 service nova] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Received event network-changed-3318496a-5cdc-4d39-a09b-2cbe7691b4f3 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1039.033548] 
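
[editor's note] The nova.virt.hardware records above trace _get_desirable_cpu_topologies for the 1-vCPU m1.nano flavor: with no flavor or image limits or preferences set (0:0:0), the maxima default to 65536 per dimension, and the only factorisation of one vCPU is 1:1:1. A minimal pure-Python sketch of that selection arithmetic follows; it mirrors the logged values only and is not Nova's implementation.

# Sketch of the topology selection logged above; illustrative only.
import itertools
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, maximum):
    """Every sockets*cores*threads factorisation of vcpus that fits
    under the per-dimension maxima (65536 each in the log above)."""
    for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
        if (s * c * t == vcpus and s <= maximum.sockets
                and c <= maximum.cores and t <= maximum.threads):
            yield VirtCPUTopology(s, c, t)

def desirable_topologies(vcpus, preferred, maximum):
    # A preference of 0 means "don't care"; sort so topologies matching
    # the preferred dimensions come first ("Sorted desired topologies").
    score = lambda topo: sum(p not in (0, v) for p, v in zip(preferred, topo))
    return sorted(possible_topologies(vcpus, maximum), key=score)

maximum = VirtCPUTopology(65536, 65536, 65536)
print(desirable_topologies(1, (0, 0, 0), maximum))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], as logged.
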
env[62923]: DEBUG nova.compute.manager [req-07ce4c2b-3113-416c-b878-6e3b4233c24c req-5965cc9b-aadb-4aab-96ff-c5afaccf1a58 service nova] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Refreshing instance network info cache due to event network-changed-3318496a-5cdc-4d39-a09b-2cbe7691b4f3. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1039.033788] env[62923]: DEBUG oslo_concurrency.lockutils [req-07ce4c2b-3113-416c-b878-6e3b4233c24c req-5965cc9b-aadb-4aab-96ff-c5afaccf1a58 service nova] Acquiring lock "refresh_cache-62889af3-06e9-4f5e-9ab0-87024e0678ca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.033910] env[62923]: DEBUG oslo_concurrency.lockutils [req-07ce4c2b-3113-416c-b878-6e3b4233c24c req-5965cc9b-aadb-4aab-96ff-c5afaccf1a58 service nova] Acquired lock "refresh_cache-62889af3-06e9-4f5e-9ab0-87024e0678ca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.034087] env[62923]: DEBUG nova.network.neutron [req-07ce4c2b-3113-416c-b878-6e3b4233c24c req-5965cc9b-aadb-4aab-96ff-c5afaccf1a58 service nova] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Refreshing network info cache for port 3318496a-5cdc-4d39-a09b-2cbe7691b4f3 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1039.054159] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.244s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.054676] env[62923]: DEBUG nova.compute.manager [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1039.197537] env[62923]: DEBUG oslo_vmware.api [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370425, 'name': ReconfigVM_Task, 'duration_secs': 1.043971} completed successfully. 
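
[editor's note] The lockutils lines here ("Acquiring lock ...", "acquired ... waited Ns", "released ... held 2.244s") are oslo.concurrency's standard wrapper logging around the resource tracker's compute_resources lock. A minimal sketch of the same pattern, assuming oslo.concurrency is installed:

# The acquire/held pattern behind the compute_resources lines above.
import time
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def instance_claim():
    # Work done here is what the "held 2.244s" figure measures; the
    # "waited" figure is the time spent queued before entry.
    time.sleep(0.1)

instance_claim()
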
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.197874] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Reconfigured VM instance instance-00000061 to attach disk [datastore2] volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626/volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.202702] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ac21409-aa7a-45ff-bf2c-3ad82db03e9a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.218175] env[62923]: DEBUG oslo_vmware.api [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1039.218175] env[62923]: value = "task-1370429" [ 1039.218175] env[62923]: _type = "Task" [ 1039.218175] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.227236] env[62923]: DEBUG oslo_vmware.api [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370429, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.336149] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370428, 'name': CreateVM_Task} progress is 99%. 
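
[editor's note] The '(returnval){ value = "task-1370429" ... }' block and the "progress is N%" lines come from oslo.vmware's task-polling loop (oslo_vmware.api.VMwareAPISession.wait_for_task, which reads task state from vCenter via the property collector). A pure-Python stand-in for that loop:

# Illustrative mimic of the poll loop producing the "progress is N%" lines.
import time

def wait_for_task(poll_fn, interval=0.5):
    """poll_fn() -> (state, progress); loop until the task reports
    success or error, logging progress like the lines above."""
    while True:
        state, progress = poll_fn()
        print(f"Task progress is {progress}%.")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)

# Toy poller that finishes on the third poll, like ReconfigVM_Task above.
_states = iter([("running", 5), ("running", 60), ("success", 100)])
wait_for_task(lambda: next(_states), interval=0)
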
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.387478] env[62923]: DEBUG nova.network.neutron [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Successfully updated port: b25ffa6d-fad0-4ba5-a95d-854bbf97af8f {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1039.427290] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-1fef5eb2-acb0-4d00-81a3-c270af7df0e8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.427546] env[62923]: DEBUG nova.objects.instance [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lazy-loading 'migration_context' on Instance uuid 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.559027] env[62923]: DEBUG nova.compute.utils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1039.560432] env[62923]: DEBUG nova.compute.manager [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1039.560542] env[62923]: DEBUG nova.network.neutron [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1039.627046] env[62923]: DEBUG nova.policy [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87fc513cff384393831ddaad87f05c00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a29f3f0408a45af83ade2e3ae22deeb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 1039.727984] env[62923]: DEBUG oslo_vmware.api [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370429, 'name': ReconfigVM_Task, 'duration_secs': 0.16695} completed successfully. 
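
[editor's note] The nova.policy line records an oslo.policy authorization failure: the member/reader credentials shown do not satisfy network:attach_external_network, which defaults to admin-only, so the external-network attach is refused. A pure-Python stand-in for that check, with the rule simplified to an admin test (an assumption, not the exact policy string):

# Simplified stand-in for the policy check logged above.
creds = {"is_admin": False, "roles": ["member", "reader"],
         "project_id": "1a29f3f0408a45af83ade2e3ae22deeb"}

def attach_external_network_allowed(creds):
    # Assumed default: effectively an admin-only rule.
    return creds.get("is_admin") or "admin" in creds.get("roles", [])

print(attach_external_network_allowed(creds))
# False -> the "Policy check ... failed" line above.
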
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.728174] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291531', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'name': 'volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b459a438-c287-4fbd-80f5-b5d3c31b83c9', 'attached_at': '', 'detached_at': '', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'serial': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1039.740998] env[62923]: DEBUG nova.network.neutron [req-07ce4c2b-3113-416c-b878-6e3b4233c24c req-5965cc9b-aadb-4aab-96ff-c5afaccf1a58 service nova] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Updated VIF entry in instance network info cache for port 3318496a-5cdc-4d39-a09b-2cbe7691b4f3. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1039.741391] env[62923]: DEBUG nova.network.neutron [req-07ce4c2b-3113-416c-b878-6e3b4233c24c req-5965cc9b-aadb-4aab-96ff-c5afaccf1a58 service nova] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Updating instance_info_cache with network_info: [{"id": "3318496a-5cdc-4d39-a09b-2cbe7691b4f3", "address": "fa:16:3e:11:78:cc", "network": {"id": "9f2d90b9-a510-4eab-b512-ae6b4edab14e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1167790952-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7373f1d735a4f51a8524e0aa4b39b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3318496a-5c", "ovs_interfaceid": "3318496a-5cdc-4d39-a09b-2cbe7691b4f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.840978] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370428, 'name': CreateVM_Task, 'duration_secs': 0.563721} completed successfully. 
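
[editor's note] The "Updated VIF entry" record above dumps a complete network_info element as nested JSON-style data. For reference, a short sketch that walks such an entry and collects its fixed and floating addresses; the dict is trimmed to the fields the walker touches, with values copied from the log:

# Walk a network_info entry like the one logged above.
vif = {
    "id": "3318496a-5cdc-4d39-a09b-2cbe7691b4f3",
    "address": "fa:16:3e:11:78:cc",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.12",
                     "type": "fixed",
                     "floating_ips": []}],
        }],
    },
}

def addresses(vif):
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            yield ip["address"], ip["type"]
            for fip in ip.get("floating_ips", []):
                yield fip["address"], "floating"

print(list(addresses(vif)))   # [('192.168.128.12', 'fixed')]
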
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.841183] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1039.842113] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.842294] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.842642] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1039.842924] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5a54258-72e5-4272-a354-76e9819f6bf6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.847863] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1039.847863] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a96d1e-901c-c4af-2f95-614be67ba1d5" [ 1039.847863] env[62923]: _type = "Task" [ 1039.847863] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.855693] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a96d1e-901c-c4af-2f95-614be67ba1d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.891673] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "refresh_cache-19e75201-8918-4b27-928b-633849222daf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.891855] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired lock "refresh_cache-19e75201-8918-4b27-928b-633849222daf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.892023] env[62923]: DEBUG nova.network.neutron [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.900818] env[62923]: DEBUG nova.network.neutron [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Successfully created port: 37d34472-7599-4430-aa04-02edec45a5ab {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1039.930225] env[62923]: DEBUG nova.objects.base [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Object Instance<1fef5eb2-acb0-4d00-81a3-c270af7df0e8> lazy-loaded attributes: info_cache,migration_context {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1039.931180] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ed1045-1a4c-4fe2-b126-96dc602459a7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.952975] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13ccaba4-76cf-448d-bbe0-08f55b1a36f5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.958465] env[62923]: DEBUG oslo_vmware.api [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1039.958465] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a31d8d-6acf-9110-ba06-465e999c15f6" [ 1039.958465] env[62923]: _type = "Task" [ 1039.958465] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.966080] env[62923]: DEBUG oslo_vmware.api [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a31d8d-6acf-9110-ba06-465e999c15f6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.063214] env[62923]: DEBUG nova.compute.manager [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1040.243975] env[62923]: DEBUG oslo_concurrency.lockutils [req-07ce4c2b-3113-416c-b878-6e3b4233c24c req-5965cc9b-aadb-4aab-96ff-c5afaccf1a58 service nova] Releasing lock "refresh_cache-62889af3-06e9-4f5e-9ab0-87024e0678ca" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.358801] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a96d1e-901c-c4af-2f95-614be67ba1d5, 'name': SearchDatastore_Task, 'duration_secs': 0.013373} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.359184] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.359430] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1040.359665] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.359843] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.360108] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.360431] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-718d0e6c-c01b-4402-8b4d-35071a712e65 {{(pid=62923) request_handler 
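
[editor's note] The "Processing image ... / Acquiring lock ... .vmdk / Creating directory ... devstack-image-cache_base / MakeDirectory" sequence is the image-cache fetch: lock the cached VMDK path, ensure the cache directory exists, and copy the image in only on a cache miss. A local-filesystem mimic of that dance (the real code drives SearchDatastore_Task, MakeDirectory, and CopyVirtualDisk_Task against the datastore):

# Local-filesystem mimic of the image-cache fetch logged above.
import pathlib, shutil, tempfile, threading

_cache_lock = threading.Lock()

def fetch_image_if_missing(image_id, src, cache_root):
    cached = pathlib.Path(cache_root, image_id, f"{image_id}.vmdk")
    with _cache_lock:                                     # ".vmdk" lock
        cached.parent.mkdir(parents=True, exist_ok=True)  # MakeDirectory
        if not cached.exists():                           # cache miss
            shutil.copy(src, cached)                      # CopyVirtualDisk
    return cached

with tempfile.TemporaryDirectory() as root:
    src = pathlib.Path(root, "upload.vmdk")
    src.write_bytes(b"vmdk")
    print(fetch_image_if_missing("cd84cf13", src, root))
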
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.368708] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.368890] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1040.369631] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5211b68e-5cb6-4c64-ac72-0930eb68469d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.374647] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1040.374647] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52149be7-aaae-3648-a3da-4df4cfbb1d63" [ 1040.374647] env[62923]: _type = "Task" [ 1040.374647] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.381823] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52149be7-aaae-3648-a3da-4df4cfbb1d63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.431437] env[62923]: DEBUG nova.network.neutron [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1040.468093] env[62923]: DEBUG oslo_vmware.api [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52a31d8d-6acf-9110-ba06-465e999c15f6, 'name': SearchDatastore_Task, 'duration_secs': 0.006458} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.468405] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.468666] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.571870] env[62923]: DEBUG nova.network.neutron [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Updating instance_info_cache with network_info: [{"id": "b25ffa6d-fad0-4ba5-a95d-854bbf97af8f", "address": "fa:16:3e:98:a2:17", "network": {"id": "9f2d90b9-a510-4eab-b512-ae6b4edab14e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1167790952-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7373f1d735a4f51a8524e0aa4b39b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb25ffa6d-fa", "ovs_interfaceid": "b25ffa6d-fad0-4ba5-a95d-854bbf97af8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.766116] env[62923]: DEBUG nova.objects.instance [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lazy-loading 'flavor' on Instance uuid b459a438-c287-4fbd-80f5-b5d3c31b83c9 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.885038] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52149be7-aaae-3648-a3da-4df4cfbb1d63, 'name': SearchDatastore_Task, 'duration_secs': 0.007968} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.885846] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1ba7dc1-4c51-4c02-93cb-cc7deb4a0038 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.890903] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1040.890903] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d7ba7b-f31b-89cb-eb9e-705e060f4c28" [ 1040.890903] env[62923]: _type = "Task" [ 1040.890903] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.898805] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d7ba7b-f31b-89cb-eb9e-705e060f4c28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.060943] env[62923]: DEBUG nova.compute.manager [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] [instance: 19e75201-8918-4b27-928b-633849222daf] Received event network-vif-plugged-b25ffa6d-fad0-4ba5-a95d-854bbf97af8f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1041.061082] env[62923]: DEBUG oslo_concurrency.lockutils [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] Acquiring lock "19e75201-8918-4b27-928b-633849222daf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.061302] env[62923]: DEBUG oslo_concurrency.lockutils [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] Lock "19e75201-8918-4b27-928b-633849222daf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.061481] env[62923]: DEBUG oslo_concurrency.lockutils [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] Lock "19e75201-8918-4b27-928b-633849222daf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.062373] env[62923]: DEBUG nova.compute.manager [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] [instance: 19e75201-8918-4b27-928b-633849222daf] No waiting events found dispatching network-vif-plugged-b25ffa6d-fad0-4ba5-a95d-854bbf97af8f {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1041.062373] env[62923]: WARNING nova.compute.manager [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] [instance: 19e75201-8918-4b27-928b-633849222daf] Received unexpected event 
network-vif-plugged-b25ffa6d-fad0-4ba5-a95d-854bbf97af8f for instance with vm_state building and task_state spawning. [ 1041.062373] env[62923]: DEBUG nova.compute.manager [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] [instance: 19e75201-8918-4b27-928b-633849222daf] Received event network-changed-b25ffa6d-fad0-4ba5-a95d-854bbf97af8f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1041.062373] env[62923]: DEBUG nova.compute.manager [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] [instance: 19e75201-8918-4b27-928b-633849222daf] Refreshing instance network info cache due to event network-changed-b25ffa6d-fad0-4ba5-a95d-854bbf97af8f. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1041.062373] env[62923]: DEBUG oslo_concurrency.lockutils [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] Acquiring lock "refresh_cache-19e75201-8918-4b27-928b-633849222daf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.074799] env[62923]: DEBUG nova.compute.manager [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1041.077194] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Releasing lock "refresh_cache-19e75201-8918-4b27-928b-633849222daf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.077493] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Instance network_info: |[{"id": "b25ffa6d-fad0-4ba5-a95d-854bbf97af8f", "address": "fa:16:3e:98:a2:17", "network": {"id": "9f2d90b9-a510-4eab-b512-ae6b4edab14e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1167790952-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7373f1d735a4f51a8524e0aa4b39b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb25ffa6d-fa", "ovs_interfaceid": "b25ffa6d-fad0-4ba5-a95d-854bbf97af8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1041.080158] env[62923]: DEBUG 
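
[editor's note] The network-vif-plugged WARNING above is benign: the event arrived while no waiter was registered for it (nothing had prepared for that instance event yet), so the pop found nothing to wake. A minimal sketch of that register/pop pattern with illustrative names:

# Sketch of the prepare/pop pattern behind "No waiting events found ...
# Received unexpected event"; names are illustrative, not Nova's API.
import threading

_events = {}                      # (instance, event_name) -> Event
_events_lock = threading.Lock()   # the per-instance "-events" lock

def prepare_for_event(instance, name):
    with _events_lock:
        ev = _events[(instance, name)] = threading.Event()
    return ev

def pop_event(instance, name):
    with _events_lock:
        waiter = _events.pop((instance, name), None)
    if waiter is None:
        print(f"WARNING: received unexpected event {name}")  # as above
    else:
        waiter.set()

pop_event("19e75201", "network-vif-plugged")   # nobody waiting -> WARNING
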
oslo_concurrency.lockutils [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] Acquired lock "refresh_cache-19e75201-8918-4b27-928b-633849222daf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.080347] env[62923]: DEBUG nova.network.neutron [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] [instance: 19e75201-8918-4b27-928b-633849222daf] Refreshing network info cache for port b25ffa6d-fad0-4ba5-a95d-854bbf97af8f {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1041.082898] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:a2:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c47e98ff-83cf-48d2-bf91-2931c7386b6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b25ffa6d-fad0-4ba5-a95d-854bbf97af8f', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1041.091259] env[62923]: DEBUG oslo.service.loopingcall [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1041.094641] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19e75201-8918-4b27-928b-633849222daf] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1041.096825] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-361fe38c-d084-459f-8352-6df0b19a5837 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.123843] env[62923]: DEBUG nova.virt.hardware [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1041.124941] env[62923]: DEBUG nova.virt.hardware [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Flavor limits 0:0:0 {{(pid=62923) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1041.124941] env[62923]: DEBUG nova.virt.hardware [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1041.124941] env[62923]: DEBUG nova.virt.hardware [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1041.124941] env[62923]: DEBUG nova.virt.hardware [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1041.124941] env[62923]: DEBUG nova.virt.hardware [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1041.125189] env[62923]: DEBUG nova.virt.hardware [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1041.125327] env[62923]: DEBUG nova.virt.hardware [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1041.125489] env[62923]: DEBUG nova.virt.hardware [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1041.125648] env[62923]: DEBUG nova.virt.hardware [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1041.125814] env[62923]: DEBUG nova.virt.hardware [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1041.126998] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a84108-5890-484d-9a82-c2164bc9eccb {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.133811] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1041.133811] env[62923]: value = "task-1370430" [ 1041.133811] env[62923]: _type = "Task" [ 1041.133811] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.141256] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a1ecbd-4c38-4847-8592-a7f23b8a3149 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.155008] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255f0945-8979-480e-ae16-e6fef595c5e3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.158918] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370430, 'name': CreateVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.173150] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03ad162-a63b-4175-b9fc-f80d27232369 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.208997] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca51e922-10cc-47d2-9c15-823e56bcfd9f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.217026] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82617339-0d1b-43fd-9a1e-2b1428e830cb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.230963] env[62923]: DEBUG nova.compute.provider_tree [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.275832] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9fbd66dd-b646-4a5f-98b1-c10fba5a6fd2 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.331s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.377791] env[62923]: DEBUG nova.network.neutron [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] [instance: 19e75201-8918-4b27-928b-633849222daf] Updated VIF entry in instance network info cache for port b25ffa6d-fad0-4ba5-a95d-854bbf97af8f. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1041.378152] env[62923]: DEBUG nova.network.neutron [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] [instance: 19e75201-8918-4b27-928b-633849222daf] Updating instance_info_cache with network_info: [{"id": "b25ffa6d-fad0-4ba5-a95d-854bbf97af8f", "address": "fa:16:3e:98:a2:17", "network": {"id": "9f2d90b9-a510-4eab-b512-ae6b4edab14e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1167790952-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7373f1d735a4f51a8524e0aa4b39b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb25ffa6d-fa", "ovs_interfaceid": "b25ffa6d-fad0-4ba5-a95d-854bbf97af8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.402013] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d7ba7b-f31b-89cb-eb9e-705e060f4c28, 'name': SearchDatastore_Task, 'duration_secs': 0.009913} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.402302] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.402579] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 62889af3-06e9-4f5e-9ab0-87024e0678ca/62889af3-06e9-4f5e-9ab0-87024e0678ca.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1041.403112] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a167edae-87ae-419d-b038-b98aabe627be {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.409845] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1041.409845] env[62923]: value = "task-1370431" [ 1041.409845] env[62923]: _type = "Task" [ 1041.409845] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.418335] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370431, 'name': CopyVirtualDisk_Task} progress is 0%. 
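
[editor's note] The CopyVirtualDisk_Task source and destination above use the "[datastore] relative/path" form shared by all the datastore operations in this log. A tiny helper that builds paths the same way (the helper name is illustrative):

# Build "[<datastore>] <relpath>" strings like the copy logged above.
def ds_path(datastore, *parts):
    return f"[{datastore}] " + "/".join(parts)

image = "cd84cf13-77b9-4bc1-bb15-31bece605a8e"
src = ds_path("datastore2", "devstack-image-cache_base", image,
              f"{image}.vmdk")
dst = ds_path("datastore2", "62889af3-06e9-4f5e-9ab0-87024e0678ca",
              "62889af3-06e9-4f5e-9ab0-87024e0678ca.vmdk")
print(src, "->", dst)
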
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.441091] env[62923]: DEBUG nova.compute.manager [req-c9aa7825-a6f2-435f-8a9e-6e94ec8302e9 req-c009eb64-2b02-4f25-8f7b-35db87e80e1c service nova] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Received event network-vif-plugged-37d34472-7599-4430-aa04-02edec45a5ab {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1041.441334] env[62923]: DEBUG oslo_concurrency.lockutils [req-c9aa7825-a6f2-435f-8a9e-6e94ec8302e9 req-c009eb64-2b02-4f25-8f7b-35db87e80e1c service nova] Acquiring lock "996cb68a-4a18-488d-890f-ace24dcd4c42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.441559] env[62923]: DEBUG oslo_concurrency.lockutils [req-c9aa7825-a6f2-435f-8a9e-6e94ec8302e9 req-c009eb64-2b02-4f25-8f7b-35db87e80e1c service nova] Lock "996cb68a-4a18-488d-890f-ace24dcd4c42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.441737] env[62923]: DEBUG oslo_concurrency.lockutils [req-c9aa7825-a6f2-435f-8a9e-6e94ec8302e9 req-c009eb64-2b02-4f25-8f7b-35db87e80e1c service nova] Lock "996cb68a-4a18-488d-890f-ace24dcd4c42-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.441910] env[62923]: DEBUG nova.compute.manager [req-c9aa7825-a6f2-435f-8a9e-6e94ec8302e9 req-c009eb64-2b02-4f25-8f7b-35db87e80e1c service nova] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] No waiting events found dispatching network-vif-plugged-37d34472-7599-4430-aa04-02edec45a5ab {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1041.442116] env[62923]: WARNING nova.compute.manager [req-c9aa7825-a6f2-435f-8a9e-6e94ec8302e9 req-c009eb64-2b02-4f25-8f7b-35db87e80e1c service nova] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Received unexpected event network-vif-plugged-37d34472-7599-4430-aa04-02edec45a5ab for instance with vm_state building and task_state spawning. [ 1041.534207] env[62923]: DEBUG nova.network.neutron [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Successfully updated port: 37d34472-7599-4430-aa04-02edec45a5ab {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1041.644308] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370430, 'name': CreateVM_Task, 'duration_secs': 0.470304} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.644651] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19e75201-8918-4b27-928b-633849222daf] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1041.645274] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.645448] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.645822] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1041.646129] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f04cd53-9f17-47e6-90c5-cbec570c02ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.651189] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1041.651189] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5230cbb6-c3c0-b475-65db-f8c5c1510676" [ 1041.651189] env[62923]: _type = "Task" [ 1041.651189] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.660032] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5230cbb6-c3c0-b475-65db-f8c5c1510676, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.736606] env[62923]: DEBUG nova.scheduler.client.report [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1041.773278] env[62923]: INFO nova.compute.manager [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Rebuilding instance [ 1041.822893] env[62923]: DEBUG nova.compute.manager [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1041.824725] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c514218b-a575-4aec-8332-c56bb88da012 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.881154] env[62923]: DEBUG oslo_concurrency.lockutils [req-5aeeb63e-eeca-4e64-ae73-fd7a52507aa4 req-4da5bdaa-98f3-4a3c-b83c-b6d4f0b71f3d service nova] Releasing lock "refresh_cache-19e75201-8918-4b27-928b-633849222daf" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.920120] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370431, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.037701] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Acquiring lock "refresh_cache-996cb68a-4a18-488d-890f-ace24dcd4c42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.037850] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Acquired lock "refresh_cache-996cb68a-4a18-488d-890f-ace24dcd4c42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.037884] env[62923]: DEBUG nova.network.neutron [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1042.162628] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5230cbb6-c3c0-b475-65db-f8c5c1510676, 'name': SearchDatastore_Task, 'duration_secs': 0.075413} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.162954] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.163238] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1042.163490] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.163658] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.163880] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1042.164185] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65813b5b-7637-4b07-853b-bc43e8afa526 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.174365] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1042.174578] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1042.175404] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66250378-f24b-40d4-bf0d-7697aa17d9b9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.181730] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1042.181730] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527dc5c6-2a40-8fa2-85d6-bfad582b63fb" [ 1042.181730] env[62923]: _type = "Task" [ 1042.181730] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.191265] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527dc5c6-2a40-8fa2-85d6-bfad582b63fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.336051] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.336051] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa47fcdc-8482-4737-b5e7-5a3c8def6ea9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.342343] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1042.342343] env[62923]: value = "task-1370432" [ 1042.342343] env[62923]: _type = "Task" [ 1042.342343] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.350041] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370432, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.420303] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370431, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615376} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.420623] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 62889af3-06e9-4f5e-9ab0-87024e0678ca/62889af3-06e9-4f5e-9ab0-87024e0678ca.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1042.420849] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1042.421181] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81667a01-4e24-47c7-abbf-9d2940545897 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.427769] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1042.427769] env[62923]: value = "task-1370433" [ 1042.427769] env[62923]: _type = "Task" [ 1042.427769] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.436453] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370433, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.569011] env[62923]: DEBUG nova.network.neutron [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1042.692397] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]527dc5c6-2a40-8fa2-85d6-bfad582b63fb, 'name': SearchDatastore_Task, 'duration_secs': 0.014844} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.693124] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91f22a5b-6895-47bf-9c24-838c3e2dd22f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.697421] env[62923]: DEBUG nova.network.neutron [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Updating instance_info_cache with network_info: [{"id": "37d34472-7599-4430-aa04-02edec45a5ab", "address": "fa:16:3e:54:85:53", "network": {"id": "4665698d-8f67-42c3-a804-0824df14322c", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1141698854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a29f3f0408a45af83ade2e3ae22deeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afb671bc-328c-40bf-9c2a-d98695e3d60c", "external-id": "nsx-vlan-transportzone-920", "segmentation_id": 920, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37d34472-75", "ovs_interfaceid": "37d34472-7599-4430-aa04-02edec45a5ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.699874] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1042.699874] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52af1c38-d3fc-359c-e83b-92c669729a78" [ 1042.699874] env[62923]: _type = "Task" [ 1042.699874] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.709414] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52af1c38-d3fc-359c-e83b-92c669729a78, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.747522] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.279s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.852431] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370432, 'name': PowerOffVM_Task, 'duration_secs': 0.207461} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.852709] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1042.906055] env[62923]: INFO nova.compute.manager [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Detaching volume 46d4817c-b1e2-4f6b-a75d-5b24a3acd626 [ 1042.937157] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370433, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061342} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.938181] env[62923]: INFO nova.virt.block_device [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Attempting to driver detach volume 46d4817c-b1e2-4f6b-a75d-5b24a3acd626 from mountpoint /dev/sdb [ 1042.938405] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Volume detach. 
Driver type: vmdk {{(pid=62923) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1042.938620] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291531', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'name': 'volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b459a438-c287-4fbd-80f5-b5d3c31b83c9', 'attached_at': '', 'detached_at': '', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'serial': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1042.938922] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1042.939696] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13da050-4f41-4673-9b5e-e35a16171c89 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.942717] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f482c1fc-e57a-4119-8231-4b2ab42e8e1c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.964200] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 62889af3-06e9-4f5e-9ab0-87024e0678ca/62889af3-06e9-4f5e-9ab0-87024e0678ca.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1042.981049] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76ee847d-b613-444e-9668-f5da0c2e4056 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.995442] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5cea417-0aea-478f-be39-81f2f65e3b9b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.002470] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c7de70-3749-4d1a-9070-d16650d12230 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.005578] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1043.005578] env[62923]: value = "task-1370434" [ 1043.005578] env[62923]: _type = "Task" [ 1043.005578] 
env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.025061] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466582b3-fc30-4d43-ac51-558214b6b8a0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.030570] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370434, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.044665] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] The volume has not been displaced from its original location: [datastore2] volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626/volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626.vmdk. No consolidation needed. {{(pid=62923) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1043.049971] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Reconfiguring VM instance instance-00000061 to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1043.050327] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f0745fc-2c50-44f0-a403-78a85c1953c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.068577] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1043.068577] env[62923]: value = "task-1370435" [ 1043.068577] env[62923]: _type = "Task" [ 1043.068577] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.076372] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370435, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.204843] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Releasing lock "refresh_cache-996cb68a-4a18-488d-890f-ace24dcd4c42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.205251] env[62923]: DEBUG nova.compute.manager [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Instance network_info: |[{"id": "37d34472-7599-4430-aa04-02edec45a5ab", "address": "fa:16:3e:54:85:53", "network": {"id": "4665698d-8f67-42c3-a804-0824df14322c", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1141698854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a29f3f0408a45af83ade2e3ae22deeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afb671bc-328c-40bf-9c2a-d98695e3d60c", "external-id": "nsx-vlan-transportzone-920", "segmentation_id": 920, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37d34472-75", "ovs_interfaceid": "37d34472-7599-4430-aa04-02edec45a5ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1043.205816] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:85:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afb671bc-328c-40bf-9c2a-d98695e3d60c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '37d34472-7599-4430-aa04-02edec45a5ab', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1043.214146] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Creating folder: Project (1a29f3f0408a45af83ade2e3ae22deeb). Parent ref: group-v291405. 
{{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1043.214940] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54068d55-09b7-4782-b341-4fffa2cfa899 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.220057] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52af1c38-d3fc-359c-e83b-92c669729a78, 'name': SearchDatastore_Task, 'duration_secs': 0.046355} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.220682] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.220942] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 19e75201-8918-4b27-928b-633849222daf/19e75201-8918-4b27-928b-633849222daf.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1043.221221] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d53dcbb7-ad2c-46e0-9017-2860a7bcd7b5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.228127] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1043.228127] env[62923]: value = "task-1370437" [ 1043.228127] env[62923]: _type = "Task" [ 1043.228127] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.232154] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Created folder: Project (1a29f3f0408a45af83ade2e3ae22deeb) in parent group-v291405. [ 1043.232385] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Creating folder: Instances. Parent ref: group-v291536. 
{{(pid=62923) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1043.232980] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f274ffc8-ddaa-4144-b58b-66b262a77138 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.237989] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370437, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.242449] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Created folder: Instances in parent group-v291536. [ 1043.242725] env[62923]: DEBUG oslo.service.loopingcall [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1043.242957] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1043.243214] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4fdc426-dd4c-41db-8c68-aef86156f321 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.267125] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1043.267125] env[62923]: value = "task-1370439" [ 1043.267125] env[62923]: _type = "Task" [ 1043.267125] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.276439] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370439, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.312149] env[62923]: INFO nova.scheduler.client.report [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleted allocation for migration ae5ecb83-a61f-457c-ae39-66bc710f74df [ 1043.468447] env[62923]: DEBUG nova.compute.manager [req-3083e57f-48d1-48f8-ac2a-4a3d45071327 req-595c0ccb-264a-4134-9ce7-d3d6f33f7ae2 service nova] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Received event network-changed-37d34472-7599-4430-aa04-02edec45a5ab {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1043.468678] env[62923]: DEBUG nova.compute.manager [req-3083e57f-48d1-48f8-ac2a-4a3d45071327 req-595c0ccb-264a-4134-9ce7-d3d6f33f7ae2 service nova] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Refreshing instance network info cache due to event network-changed-37d34472-7599-4430-aa04-02edec45a5ab. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1043.468906] env[62923]: DEBUG oslo_concurrency.lockutils [req-3083e57f-48d1-48f8-ac2a-4a3d45071327 req-595c0ccb-264a-4134-9ce7-d3d6f33f7ae2 service nova] Acquiring lock "refresh_cache-996cb68a-4a18-488d-890f-ace24dcd4c42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.469082] env[62923]: DEBUG oslo_concurrency.lockutils [req-3083e57f-48d1-48f8-ac2a-4a3d45071327 req-595c0ccb-264a-4134-9ce7-d3d6f33f7ae2 service nova] Acquired lock "refresh_cache-996cb68a-4a18-488d-890f-ace24dcd4c42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.469228] env[62923]: DEBUG nova.network.neutron [req-3083e57f-48d1-48f8-ac2a-4a3d45071327 req-595c0ccb-264a-4134-9ce7-d3d6f33f7ae2 service nova] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Refreshing network info cache for port 37d34472-7599-4430-aa04-02edec45a5ab {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1043.516624] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370434, 'name': ReconfigVM_Task, 'duration_secs': 0.323478} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.516927] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 62889af3-06e9-4f5e-9ab0-87024e0678ca/62889af3-06e9-4f5e-9ab0-87024e0678ca.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1043.517688] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6f2f99c-d5f5-47cc-b5a8-fd9e74e4517a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.524754] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1043.524754] env[62923]: value = "task-1370440" [ 1043.524754] env[62923]: _type = "Task" [ 1043.524754] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.533646] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370440, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.578460] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370435, 'name': ReconfigVM_Task, 'duration_secs': 0.199054} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.578930] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Reconfigured VM instance instance-00000061 to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1043.583729] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00be887e-8096-44bf-a9f7-382ea579ad0d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.599414] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1043.599414] env[62923]: value = "task-1370441" [ 1043.599414] env[62923]: _type = "Task" [ 1043.599414] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.608040] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370441, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.739027] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370437, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.778768] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370439, 'name': CreateVM_Task} progress is 25%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.818985] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d24cdedb-79d3-4ec7-b657-4ed316ec527a tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.753s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.035056] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370440, 'name': Rename_Task, 'duration_secs': 0.270703} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.035373] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.035634] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12fefde0-0787-4f1f-82c6-6502c28fb997 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.041677] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1044.041677] env[62923]: value = "task-1370442" [ 1044.041677] env[62923]: _type = "Task" [ 1044.041677] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.050383] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370442, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.110481] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370441, 'name': ReconfigVM_Task, 'duration_secs': 0.205057} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.110842] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291531', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'name': 'volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b459a438-c287-4fbd-80f5-b5d3c31b83c9', 'attached_at': '', 'detached_at': '', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'serial': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1044.214236] env[62923]: DEBUG nova.network.neutron [req-3083e57f-48d1-48f8-ac2a-4a3d45071327 req-595c0ccb-264a-4134-9ce7-d3d6f33f7ae2 service nova] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Updated VIF entry in instance network info cache for port 37d34472-7599-4430-aa04-02edec45a5ab. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1044.214634] env[62923]: DEBUG nova.network.neutron [req-3083e57f-48d1-48f8-ac2a-4a3d45071327 req-595c0ccb-264a-4134-9ce7-d3d6f33f7ae2 service nova] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Updating instance_info_cache with network_info: [{"id": "37d34472-7599-4430-aa04-02edec45a5ab", "address": "fa:16:3e:54:85:53", "network": {"id": "4665698d-8f67-42c3-a804-0824df14322c", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1141698854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a29f3f0408a45af83ade2e3ae22deeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afb671bc-328c-40bf-9c2a-d98695e3d60c", "external-id": "nsx-vlan-transportzone-920", "segmentation_id": 920, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37d34472-75", "ovs_interfaceid": "37d34472-7599-4430-aa04-02edec45a5ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.228816] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.229069] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.229293] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.229475] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.229643] env[62923]: DEBUG oslo_concurrency.lockutils [None 
req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.231672] env[62923]: INFO nova.compute.manager [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Terminating instance [ 1044.238094] env[62923]: DEBUG nova.compute.manager [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1044.238291] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1044.239240] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3005f832-bf77-460b-8793-7c4247a84f9e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.248924] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370437, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575261} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.249413] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1044.249668] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 19e75201-8918-4b27-928b-633849222daf/19e75201-8918-4b27-928b-633849222daf.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1044.249870] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1044.250093] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59fb7d24-4aa5-43ac-9aab-a86679536fe6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.251627] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-589d104c-1484-411f-ac80-954613214e72 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.257245] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1044.257245] env[62923]: value = "task-1370444" [ 1044.257245] env[62923]: _type = "Task" [ 1044.257245] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.258432] env[62923]: DEBUG oslo_vmware.api [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1044.258432] env[62923]: value = "task-1370443" [ 1044.258432] env[62923]: _type = "Task" [ 1044.258432] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.269082] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370444, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.274437] env[62923]: DEBUG oslo_vmware.api [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370443, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.279456] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370439, 'name': CreateVM_Task, 'duration_secs': 0.638731} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.279597] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1044.280245] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.280413] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.280732] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1044.280974] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f974206-8ed7-401b-9eb6-278f4f790f33 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.285140] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Waiting for the task: (returnval){ [ 1044.285140] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b9db6f-5f1d-e8a8-1924-926d578d63ff" [ 1044.285140] env[62923]: _type = "Task" [ 1044.285140] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.292341] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b9db6f-5f1d-e8a8-1924-926d578d63ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.552070] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370442, 'name': PowerOnVM_Task, 'duration_secs': 0.465147} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.552070] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1044.552070] env[62923]: INFO nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Took 7.98 seconds to spawn the instance on the hypervisor. [ 1044.552070] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1044.552653] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a0c5fd-897d-4eab-aec4-c7bbec4f93db {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.717971] env[62923]: DEBUG oslo_concurrency.lockutils [req-3083e57f-48d1-48f8-ac2a-4a3d45071327 req-595c0ccb-264a-4134-9ce7-d3d6f33f7ae2 service nova] Releasing lock "refresh_cache-996cb68a-4a18-488d-890f-ace24dcd4c42" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.771919] env[62923]: DEBUG oslo_vmware.api [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370443, 'name': PowerOffVM_Task, 'duration_secs': 0.19077} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.775147] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1044.775398] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1044.775689] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370444, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067447} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.775893] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3fc2a62c-54bc-4fc2-903d-56ad1e453698 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.777283] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1044.778010] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a22af4-2af9-4693-8395-d0a4a0605c26 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.799351] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 19e75201-8918-4b27-928b-633849222daf/19e75201-8918-4b27-928b-633849222daf.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1044.802469] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e45a2c22-0ee0-4669-bcec-d28a7d327f68 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.822011] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b9db6f-5f1d-e8a8-1924-926d578d63ff, 'name': SearchDatastore_Task, 'duration_secs': 0.00876} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.823153] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.823391] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1044.823629] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.823778] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.823956] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1044.824274] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1044.824274] env[62923]: value = "task-1370446" [ 1044.824274] env[62923]: _type = "Task" [ 1044.824274] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.824458] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2bcd5fc8-d878-4ce5-81e8-7ebcd501f0ff {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.833504] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370446, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.834488] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1044.834663] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1044.835327] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72377189-efc7-4885-971f-78dc552b4fd5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.839706] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Waiting for the task: (returnval){ [ 1044.839706] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f4ba56-3f48-db2f-7c67-37a211d938ae" [ 1044.839706] env[62923]: _type = "Task" [ 1044.839706] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.846647] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f4ba56-3f48-db2f-7c67-37a211d938ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.863811] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1044.864049] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1044.864238] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleting the datastore file [datastore2] 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.864514] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f29b6667-4e09-4600-95e2-03e89af4ca8c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.870864] env[62923]: DEBUG oslo_vmware.api [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1044.870864] env[62923]: value = "task-1370447" [ 1044.870864] env[62923]: _type = "Task" [ 1044.870864] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.878427] env[62923]: DEBUG oslo_vmware.api [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370447, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.068611] env[62923]: INFO nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Took 13.71 seconds to build instance. 
[ 1045.160170] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1045.160468] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11f32790-9175-4e14-8b1f-922eed39468d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.167654] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1045.167654] env[62923]: value = "task-1370448" [ 1045.167654] env[62923]: _type = "Task" [ 1045.167654] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.177075] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] VM already powered off {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1045.177270] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Volume detach. Driver type: vmdk {{(pid=62923) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1045.177463] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291531', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'name': 'volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b459a438-c287-4fbd-80f5-b5d3c31b83c9', 'attached_at': '', 'detached_at': '', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'serial': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1045.178174] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014d05fa-3557-4dae-b073-a207950b5caf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.195414] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef73ab88-0472-4a96-bb26-a0beabfbe51b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.200782] env[62923]: WARNING nova.virt.vmwareapi.driver [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] The volume None does not exist!: 
nova.exception.DiskNotFound: Unable to find volume [ 1045.201040] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1045.201786] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e6b015-8813-4e62-8927-20b0d2a4f0f2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.207453] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1045.207667] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8b6dd32-b038-48f5-b51b-18f5128857ad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.275701] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1045.275934] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1045.276136] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleting the datastore file [datastore1] b459a438-c287-4fbd-80f5-b5d3c31b83c9 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1045.276472] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb12d492-c068-460a-b49f-7c4c9489f134 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.283079] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1045.283079] env[62923]: value = "task-1370450" [ 1045.283079] env[62923]: _type = "Task" [ 1045.283079] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.290182] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370450, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.335201] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370446, 'name': ReconfigVM_Task, 'duration_secs': 0.283439} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.335512] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 19e75201-8918-4b27-928b-633849222daf/19e75201-8918-4b27-928b-633849222daf.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1045.336136] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e57a4030-57fe-4443-b8c0-223bb6afe426 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.344052] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1045.344052] env[62923]: value = "task-1370451" [ 1045.344052] env[62923]: _type = "Task" [ 1045.344052] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.350517] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f4ba56-3f48-db2f-7c67-37a211d938ae, 'name': SearchDatastore_Task, 'duration_secs': 0.011199} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.351577] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0df623c0-7436-4f66-9ced-469ca7c4c681 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.356622] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370451, 'name': Rename_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.359236] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Waiting for the task: (returnval){ [ 1045.359236] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528094f5-9b74-afd9-05ba-a720475251cb" [ 1045.359236] env[62923]: _type = "Task" [ 1045.359236] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.366570] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528094f5-9b74-afd9-05ba-a720475251cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.378979] env[62923]: DEBUG oslo_vmware.api [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370447, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151352} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.379234] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1045.379419] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1045.379596] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1045.379767] env[62923]: INFO nova.compute.manager [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1045.379992] env[62923]: DEBUG oslo.service.loopingcall [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1045.380196] env[62923]: DEBUG nova.compute.manager [-] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1045.380287] env[62923]: DEBUG nova.network.neutron [-] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1045.571117] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "62889af3-06e9-4f5e-9ab0-87024e0678ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.217s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.794118] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370450, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.804291] env[62923]: DEBUG nova.compute.manager [req-1a8bc061-21ef-4a27-ad6e-ad73892c54eb req-cd71ac28-c43e-4e57-bc53-47c9171a2e0a service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Received event network-vif-deleted-1fb7d101-34b0-45db-b473-84c94e4b9aaa {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1045.804511] env[62923]: INFO nova.compute.manager [req-1a8bc061-21ef-4a27-ad6e-ad73892c54eb req-cd71ac28-c43e-4e57-bc53-47c9171a2e0a service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Neutron deleted interface 1fb7d101-34b0-45db-b473-84c94e4b9aaa; detaching it from the instance and deleting it from the info cache [ 1045.804688] env[62923]: DEBUG nova.network.neutron [req-1a8bc061-21ef-4a27-ad6e-ad73892c54eb req-cd71ac28-c43e-4e57-bc53-47c9171a2e0a service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.854129] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370451, 'name': Rename_Task, 'duration_secs': 0.13009} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.854452] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1045.854717] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f7cc7d5-f3c0-4c0c-93d2-3abc9fd78fea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.863636] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1045.863636] env[62923]: value = "task-1370452" [ 1045.863636] env[62923]: _type = "Task" [ 1045.863636] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.869858] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528094f5-9b74-afd9-05ba-a720475251cb, 'name': SearchDatastore_Task, 'duration_secs': 0.016098} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.870446] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.870735] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 996cb68a-4a18-488d-890f-ace24dcd4c42/996cb68a-4a18-488d-890f-ace24dcd4c42.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1045.870987] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f0b5747-bef2-467f-98d7-a34eded04e3b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.876016] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370452, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.880046] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Waiting for the task: (returnval){ [ 1045.880046] env[62923]: value = "task-1370453" [ 1045.880046] env[62923]: _type = "Task" [ 1045.880046] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.887835] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370453, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.280662] env[62923]: DEBUG nova.network.neutron [-] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.297689] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370450, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.307684] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e4cbe841-9ccb-4174-9206-0035cd81262a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.318518] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4e8b36-16ed-4061-9176-c40f95f1097f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.349185] env[62923]: DEBUG nova.compute.manager [req-1a8bc061-21ef-4a27-ad6e-ad73892c54eb req-cd71ac28-c43e-4e57-bc53-47c9171a2e0a service nova] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Detach interface failed, port_id=1fb7d101-34b0-45db-b473-84c94e4b9aaa, reason: Instance 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1046.373888] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370452, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.389459] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370453, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478262} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.389735] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 996cb68a-4a18-488d-890f-ace24dcd4c42/996cb68a-4a18-488d-890f-ace24dcd4c42.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1046.389952] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1046.390230] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c730127-88fd-44c1-b9d5-e9aaa023f953 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.396536] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Waiting for the task: (returnval){ [ 1046.396536] env[62923]: value = "task-1370454" [ 1046.396536] env[62923]: _type = "Task" [ 1046.396536] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.404543] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370454, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.783942] env[62923]: INFO nova.compute.manager [-] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Took 1.40 seconds to deallocate network for instance. [ 1046.801478] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370450, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.875801] env[62923]: DEBUG oslo_vmware.api [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370452, 'name': PowerOnVM_Task, 'duration_secs': 0.667398} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.876058] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1046.876310] env[62923]: INFO nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Took 8.04 seconds to spawn the instance on the hypervisor. [ 1046.876508] env[62923]: DEBUG nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1046.877289] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf45c01-b19b-41ac-a6ff-b4c0b015686b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.904795] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370454, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065281} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.904964] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1046.906346] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a25cd6-f3e9-4eb3-b619-867929169c34 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.927615] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 996cb68a-4a18-488d-890f-ace24dcd4c42/996cb68a-4a18-488d-890f-ace24dcd4c42.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1046.928088] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c70f98f7-1c6c-41df-8df8-3bd8c7934663 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.948204] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Waiting for the task: (returnval){ [ 
1046.948204] env[62923]: value = "task-1370455" [ 1046.948204] env[62923]: _type = "Task" [ 1046.948204] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.956276] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370455, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.297320] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.297695] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.297920] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.300487] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370450, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.698274} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.300955] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1047.301200] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1047.301336] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1047.324766] env[62923]: INFO nova.scheduler.client.report [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleted allocations for instance 1fef5eb2-acb0-4d00-81a3-c270af7df0e8 [ 1047.395008] env[62923]: INFO nova.compute.manager [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Took 16.00 seconds to build instance. [ 1047.459682] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370455, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.808096] env[62923]: INFO nova.virt.block_device [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Booting with volume 46d4817c-b1e2-4f6b-a75d-5b24a3acd626 at /dev/sdb [ 1047.832088] env[62923]: DEBUG oslo_concurrency.lockutils [None req-afa2c877-b402-4714-be54-f0efbaadc448 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "1fef5eb2-acb0-4d00-81a3-c270af7df0e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.603s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.844441] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9bb65cc-fd5b-4b39-9c42-d3d102b0318f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.854610] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d5ecc2-204b-4254-bf58-27e51c9c9d9f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.884283] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1728f27-db14-43d6-a51b-49968cc72cd9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.893059] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42366595-acce-49d0-a407-17d22f5c8e86 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.904069] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15f4a3b9-16be-4a93-bc71-16466a3efe68 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "19e75201-8918-4b27-928b-633849222daf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.516s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.923162] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4942ade6-0f60-4307-abbe-7493e22acf5c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.930756] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5879a2-a300-49c5-b5e7-2ced8f1615a6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.947407] env[62923]: DEBUG nova.virt.block_device [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Updating existing volume attachment record: 4411eb06-37d2-4fe5-ad9d-563bbb0df075 {{(pid=62923) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1047.959329] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 
tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370455, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.126951] env[62923]: DEBUG oslo_concurrency.lockutils [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "62889af3-06e9-4f5e-9ab0-87024e0678ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.127235] env[62923]: DEBUG oslo_concurrency.lockutils [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "62889af3-06e9-4f5e-9ab0-87024e0678ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.127771] env[62923]: DEBUG oslo_concurrency.lockutils [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "62889af3-06e9-4f5e-9ab0-87024e0678ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.127976] env[62923]: DEBUG oslo_concurrency.lockutils [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "62889af3-06e9-4f5e-9ab0-87024e0678ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.128168] env[62923]: DEBUG oslo_concurrency.lockutils [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "62889af3-06e9-4f5e-9ab0-87024e0678ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.130313] env[62923]: INFO nova.compute.manager [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Terminating instance [ 1048.131873] env[62923]: DEBUG nova.compute.manager [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1048.132081] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1048.132893] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08834b11-2109-4c6b-bce8-e576c7e5abef {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.141682] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.141929] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7413ff6e-239d-4aac-a913-ea26ee9b590f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.148939] env[62923]: DEBUG oslo_vmware.api [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1048.148939] env[62923]: value = "task-1370456" [ 1048.148939] env[62923]: _type = "Task" [ 1048.148939] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.158071] env[62923]: DEBUG oslo_vmware.api [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370456, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.193571] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "19e75201-8918-4b27-928b-633849222daf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.193895] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "19e75201-8918-4b27-928b-633849222daf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.194177] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "19e75201-8918-4b27-928b-633849222daf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.194438] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "19e75201-8918-4b27-928b-633849222daf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.194676] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "19e75201-8918-4b27-928b-633849222daf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.197150] env[62923]: INFO nova.compute.manager [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Terminating instance [ 1048.199142] env[62923]: DEBUG nova.compute.manager [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1048.199547] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1048.200226] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378f3eaf-6714-44fd-a906-748956a0344b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.208726] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.208985] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b680f014-8d25-4bea-bcb6-6906958c7c9d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.216787] env[62923]: DEBUG oslo_vmware.api [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1048.216787] env[62923]: value = "task-1370457" [ 1048.216787] env[62923]: _type = "Task" [ 1048.216787] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.227369] env[62923]: DEBUG oslo_vmware.api [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370457, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.461201] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370455, 'name': ReconfigVM_Task, 'duration_secs': 1.047997} completed successfully. 
[ 1048.461536] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 996cb68a-4a18-488d-890f-ace24dcd4c42/996cb68a-4a18-488d-890f-ace24dcd4c42.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1048.462231] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9537acfb-2bb7-427a-b716-6089af78e491 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1048.470183] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Waiting for the task: (returnval){
[ 1048.470183] env[62923]: value = "task-1370458"
[ 1048.470183] env[62923]: _type = "Task"
[ 1048.470183] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1048.479024] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370458, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1048.613061] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1048.613061] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1048.613061] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1048.613423] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1048.613675] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1048.615821] env[62923]: INFO nova.compute.manager [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Terminating instance
[ 1048.617687] env[62923]: DEBUG nova.compute.manager [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 1048.617877] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1048.618732] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b00ffe-feee-4c35-9247-1bfe49e8457e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1048.626943] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1048.627221] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bb7fb44-555b-461f-9958-16a210a5fc40 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1048.635306] env[62923]: DEBUG oslo_vmware.api [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){
[ 1048.635306] env[62923]: value = "task-1370459"
[ 1048.635306] env[62923]: _type = "Task"
[ 1048.635306] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1048.643296] env[62923]: DEBUG oslo_vmware.api [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370459, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1048.657763] env[62923]: DEBUG oslo_vmware.api [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370456, 'name': PowerOffVM_Task, 'duration_secs': 0.243858} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1048.658041] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1048.658215] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1048.658460] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-492bb05d-c6f6-40ef-8eec-81489c74fb34 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1048.723641] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1048.723875] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1048.724081] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Deleting the datastore file [datastore2] 62889af3-06e9-4f5e-9ab0-87024e0678ca {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1048.724357] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0596db12-195d-40ee-8a67-5ffc8051420d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1048.730632] env[62923]: DEBUG oslo_vmware.api [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370457, 'name': PowerOffVM_Task, 'duration_secs': 0.233894} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1048.731912] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1048.732124] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1048.732432] env[62923]: DEBUG oslo_vmware.api [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){
[ 1048.732432] env[62923]: value = "task-1370461"
[ 1048.732432] env[62923]: _type = "Task"
[ 1048.732432] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1048.732682] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e415ee45-a44a-4e7a-9d68-0de995d7650d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1048.743238] env[62923]: DEBUG oslo_vmware.api [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370461, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
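Each PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task round-trip above follows the same oslo.vmware pattern: invoke_api() returns a task moniker such as task-1370457 immediately, and wait_for_task() polls it, emitting the "Waiting for the task: (returnval){...}" and "progress is N%" records. A minimal sketch of that loop, assuming a reachable vCenter; the host, credentials, and moref value below are placeholders:

from oslo_vmware import api
from oslo_vmware import vim_util

# Placeholder endpoint and credentials; constructing the session logs in.
session = api.VMwareAPISession("vc.example.test", "user", "secret",
                               api_retry_count=10, task_poll_interval=0.5)

# Build a managed object reference for a VM (the 'vm-1234' id is assumed).
vm_ref = vim_util.get_moref("vm-1234", "VirtualMachine")

# The SOAP call returns a Task moniker right away ...
task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)

# ... and wait_for_task() polls it until 'success', raising on 'error'.
task_info = session.wait_for_task(task)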
[ 1048.809731] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1048.810023] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1048.810194] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Deleting the datastore file [datastore2] 19e75201-8918-4b27-928b-633849222daf {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1048.810497] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57cee788-0a7c-497b-8629-14c2689be287 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1048.818935] env[62923]: DEBUG oslo_vmware.api [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){
[ 1048.818935] env[62923]: value = "task-1370463"
[ 1048.818935] env[62923]: _type = "Task"
[ 1048.818935] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1048.828328] env[62923]: DEBUG oslo_vmware.api [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370463, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1048.979766] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370458, 'name': Rename_Task, 'duration_secs': 0.135959} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1048.979997] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1048.980226] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd0b1726-7186-4fa9-94f4-e14761b2a533 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1048.986856] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Waiting for the task: (returnval){
[ 1048.986856] env[62923]: value = "task-1370464"
[ 1048.986856] env[62923]: _type = "Task"
[ 1048.986856] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1048.996363] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370464, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1049.146936] env[62923]: DEBUG oslo_vmware.api [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370459, 'name': PowerOffVM_Task, 'duration_secs': 0.183549} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1049.147425] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1049.147497] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1049.147782] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d0c5a968-ada4-4a9d-af2d-2d0fd56e2a96 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1049.221395] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1049.221648] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1049.221802] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Deleting the datastore file [datastore1] 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1049.223426] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-497682fe-863b-4f8a-9242-0c2aa48d0596 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1049.228779] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "222b944d-c58e-476e-b723-fc2b6990120a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1049.229120] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "222b944d-c58e-476e-b723-fc2b6990120a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1049.236164] env[62923]: DEBUG oslo_vmware.api [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){
[ 1049.236164] env[62923]: value = "task-1370466"
[ 1049.236164] env[62923]: _type = "Task"
[ 1049.236164] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1049.250351] env[62923]: DEBUG oslo_vmware.api [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370461, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144811} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1049.253543] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1049.253788] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1049.254023] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1049.254234] env[62923]: INFO nova.compute.manager [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Took 1.12 seconds to destroy the instance on the hypervisor.
[ 1049.254537] env[62923]: DEBUG oslo.service.loopingcall [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1049.255013] env[62923]: DEBUG oslo_vmware.api [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370466, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1049.255264] env[62923]: DEBUG nova.compute.manager [-] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1049.255366] env[62923]: DEBUG nova.network.neutron [-] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1049.329900] env[62923]: DEBUG oslo_vmware.api [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370463, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134436} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1049.330181] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1049.330391] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1049.330539] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1049.330717] env[62923]: INFO nova.compute.manager [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: 19e75201-8918-4b27-928b-633849222daf] Took 1.13 seconds to destroy the instance on the hypervisor.
[ 1049.330957] env[62923]: DEBUG oslo.service.loopingcall [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1049.331163] env[62923]: DEBUG nova.compute.manager [-] [instance: 19e75201-8918-4b27-928b-633849222daf] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1049.331259] env[62923]: DEBUG nova.network.neutron [-] [instance: 19e75201-8918-4b27-928b-633849222daf] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1049.501009] env[62923]: DEBUG oslo_vmware.api [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370464, 'name': PowerOnVM_Task, 'duration_secs': 0.46967} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
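For each of the instances being deleted, the records above show the same driver-side ordering: power off, unregister the VM, delete its directory from the datastore, then hand back to the compute manager to deallocate the Neutron ports. A schematic of that sequence under the oslo.vmware session API (placeholder refs and paths; a condensed illustration, not the vmops implementation):

def destroy_on_hypervisor(session, vm_ref, dc_ref, ds_path):
    """Condensed teardown mirroring the log order above."""
    # 1. 'Powering off the VM' -> PowerOffVM_Task, polled to completion.
    session.wait_for_task(
        session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref))
    # 2. 'Unregistering the VM' -> UnregisterVM is synchronous (no task).
    session.invoke_api(session.vim, "UnregisterVM", vm_ref)
    # 3. 'Deleting the datastore file [datastore2] <uuid>' -> file manager task.
    file_manager = session.vim.service_content.fileManager
    session.wait_for_task(
        session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                           file_manager, name=ds_path, datacenter=dc_ref))
    # 4. Network deallocation then happens in nova.compute.manager, not here.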
[ 1049.501009] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1049.501009] env[62923]: INFO nova.compute.manager [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Took 8.42 seconds to spawn the instance on the hypervisor.
[ 1049.501009] env[62923]: DEBUG nova.compute.manager [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1049.501009] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8435a3-64de-4224-a164-65a821a970dc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1049.607592] env[62923]: DEBUG nova.compute.manager [req-1d626e2c-4562-457c-8c49-386ccc641bfa req-ed876512-4158-442f-b6a3-ec83991fd0d2 service nova] [instance: 19e75201-8918-4b27-928b-633849222daf] Received event network-vif-deleted-b25ffa6d-fad0-4ba5-a95d-854bbf97af8f {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1049.607788] env[62923]: INFO nova.compute.manager [req-1d626e2c-4562-457c-8c49-386ccc641bfa req-ed876512-4158-442f-b6a3-ec83991fd0d2 service nova] [instance: 19e75201-8918-4b27-928b-633849222daf] Neutron deleted interface b25ffa6d-fad0-4ba5-a95d-854bbf97af8f; detaching it from the instance and deleting it from the info cache
[ 1049.607972] env[62923]: DEBUG nova.network.neutron [req-1d626e2c-4562-457c-8c49-386ccc641bfa req-ed876512-4158-442f-b6a3-ec83991fd0d2 service nova] [instance: 19e75201-8918-4b27-928b-633849222daf] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1049.614623] env[62923]: DEBUG nova.compute.manager [req-922c2a2b-0f91-4b49-9ff7-6a7482ff1859 req-202a53ab-ed21-449d-b880-dfeb65e47e02 service nova] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Received event network-vif-deleted-3318496a-5cdc-4d39-a09b-2cbe7691b4f3 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1049.614975] env[62923]: INFO nova.compute.manager [req-922c2a2b-0f91-4b49-9ff7-6a7482ff1859 req-202a53ab-ed21-449d-b880-dfeb65e47e02 service nova] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Neutron deleted interface 3318496a-5cdc-4d39-a09b-2cbe7691b4f3; detaching it from the instance and deleting it from the info cache
[ 1049.615227] env[62923]: DEBUG nova.network.neutron [req-922c2a2b-0f91-4b49-9ff7-6a7482ff1859 req-202a53ab-ed21-449d-b880-dfeb65e47e02 service nova] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1049.731818] env[62923]: DEBUG nova.compute.manager [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 1049.750162] env[62923]: DEBUG oslo_vmware.api [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370466, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20059} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1049.750427] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1049.750615] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1049.750795] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1049.750966] env[62923]: INFO nova.compute.manager [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Took 1.13 seconds to destroy the instance on the hypervisor.
[ 1049.751253] env[62923]: DEBUG oslo.service.loopingcall [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1049.751426] env[62923]: DEBUG nova.compute.manager [-] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1049.751520] env[62923]: DEBUG nova.network.neutron [-] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1050.016358] env[62923]: INFO nova.compute.manager [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Took 15.23 seconds to build instance.
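The "Waiting for function ..._deallocate_network_with_retries to return" records come from oslo.service's looping-call machinery (loopingcall.py:435), which re-invokes a callback until it signals completion. A small self-contained sketch of that primitive, using the generic FixedIntervalLoopingCall (the log does not show which looping-call variant Nova instantiates here, so treat this as an illustration of the mechanism):

from oslo_service import loopingcall

attempts = {"count": 0}

def _deallocate_network_with_retries():
    # Placeholder body: pretend the first two attempts need a retry,
    # then finish by raising LoopingCallDone with a result value.
    attempts["count"] += 1
    if attempts["count"] < 3:
        return  # returning normally lets the loop fire again
    raise loopingcall.LoopingCallDone(retvalue="deallocated")

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
# start() schedules the loop; wait() blocks until LoopingCallDone is raised.
print(timer.start(interval=0.1).wait())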
[ 1050.065926] env[62923]: DEBUG nova.virt.hardware [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1050.066200] env[62923]: DEBUG nova.virt.hardware [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1050.066442] env[62923]: DEBUG nova.virt.hardware [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1050.066647] env[62923]: DEBUG nova.virt.hardware [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1050.066798] env[62923]: DEBUG nova.virt.hardware [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1050.066949] env[62923]: DEBUG nova.virt.hardware [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1050.067332] env[62923]: DEBUG nova.virt.hardware [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1050.067550] env[62923]: DEBUG nova.virt.hardware [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1050.068158] env[62923]: DEBUG nova.virt.hardware [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1050.068158] env[62923]: DEBUG nova.virt.hardware [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1050.068296] env[62923]: DEBUG nova.virt.hardware [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1050.069410] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d994431-4ace-49ef-a0f0-2f58ada30c41 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1050.078181] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0187361-d9ad-4394-9675-d0a30630e7e3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1050.082266] env[62923]: DEBUG nova.network.neutron [-] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1050.097821] env[62923]: DEBUG nova.network.neutron [-] [instance: 19e75201-8918-4b27-928b-633849222daf] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1050.099140] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:43:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e41070eb-3ac1-4ca9-a3d0-fd65893a97de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35f893d5-3b23-4350-92a9-e3803a075eb0', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1050.106920] env[62923]: DEBUG oslo.service.loopingcall [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
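The nova.virt.hardware records above trace a small constraint solver: with no CPU-topology limits in the flavor or image (the 0:0:0 lines), the limits default to 65536 sockets/cores/threads, and for the 1-vCPU m1.nano flavor the only topology whose product matches is sockets=1, cores=1, threads=1. A toy reimplementation of just that enumeration step (simplified; the real _get_desirable_cpu_topologies also applies preferences and sorting):

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product is exactly vcpus."""
    bound = lambda limit: range(1, min(limit, vcpus) + 1)
    for s, c, t in itertools.product(bound(max_sockets), bound(max_cores),
                                     bound(max_threads)):
        if s * c * t == vcpus:
            yield (s, c, t)

# For 1 vCPU this yields a single candidate, matching the logged
# 'Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]'.
print(list(possible_topologies(1)))  # [(1, 1, 1)]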
[ 1050.107625] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 1050.107847] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0566b42f-6a4b-4124-8ff4-f1878aecac29 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1050.124312] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be1e8905-6df7-4421-92ab-cf33ffe70e41 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1050.125971] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34595c67-f7e7-4c60-b15a-11ce62311074 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1050.138631] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3a5932-e36d-4d9f-91f1-97e619143e18 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1050.151696] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1050.151696] env[62923]: value = "task-1370467"
[ 1050.151696] env[62923]: _type = "Task"
[ 1050.151696] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1050.152403] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06103293-ae0c-4f43-8a3c-1d540ab167d9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1050.184625] env[62923]: DEBUG nova.compute.manager [req-1d626e2c-4562-457c-8c49-386ccc641bfa req-ed876512-4158-442f-b6a3-ec83991fd0d2 service nova] [instance: 19e75201-8918-4b27-928b-633849222daf] Detach interface failed, port_id=b25ffa6d-fad0-4ba5-a95d-854bbf97af8f, reason: Instance 19e75201-8918-4b27-928b-633849222daf could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 1050.197960] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370467, 'name': CreateVM_Task} progress is 15%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1050.198388] env[62923]: DEBUG nova.compute.manager [req-922c2a2b-0f91-4b49-9ff7-6a7482ff1859 req-202a53ab-ed21-449d-b880-dfeb65e47e02 service nova] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Detach interface failed, port_id=3318496a-5cdc-4d39-a09b-2cbe7691b4f3, reason: Instance 62889af3-06e9-4f5e-9ab0-87024e0678ca could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 1050.258093] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1050.258383] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1050.260218] env[62923]: INFO nova.compute.claims [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1050.518888] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68332740-c8c9-435a-95fe-b601ee311794 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Lock "996cb68a-4a18-488d-890f-ace24dcd4c42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.735s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1050.585383] env[62923]: INFO nova.compute.manager [-] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Took 1.33 seconds to deallocate network for instance.
[ 1050.607870] env[62923]: INFO nova.compute.manager [-] [instance: 19e75201-8918-4b27-928b-633849222daf] Took 1.28 seconds to deallocate network for instance.
[ 1050.659972] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370467, 'name': CreateVM_Task, 'duration_secs': 0.376665} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1050.660165] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 1050.660909] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1050.661096] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1050.661423] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1050.661944] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1aef6646-5879-4777-b3d7-0bcb889c0cb3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1050.667123] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1050.667123] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c36ef3-2f7f-c137-530b-6ff47043db20"
[ 1050.667123] env[62923]: _type = "Task"
[ 1050.667123] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1050.675802] env[62923]: DEBUG nova.network.neutron [-] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1050.676939] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c36ef3-2f7f-c137-530b-6ff47043db20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1051.091772] env[62923]: DEBUG oslo_concurrency.lockutils [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1051.114297] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1051.177279] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c36ef3-2f7f-c137-530b-6ff47043db20, 'name': SearchDatastore_Task, 'duration_secs': 0.010826} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1051.177606] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1051.177852] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1051.178106] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1051.178262] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1051.178446] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1051.178896] env[62923]: INFO nova.compute.manager [-] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Took 1.43 seconds to deallocate network for instance.
[ 1051.179134] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c379ac0d-7774-48a3-bd79-754f0b52d4f0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1051.189213] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1051.189393] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 1051.190166] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b17c5b1d-51f4-4502-8f53-f0d1f4abdba8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1051.195803] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1051.195803] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52db0cd2-8416-4db7-f5fa-61a7c3a524f5"
[ 1051.195803] env[62923]: _type = "Task"
[ 1051.195803] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1051.203464] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52db0cd2-8416-4db7-f5fa-61a7c3a524f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
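The Acquiring/Acquired/Releasing records around "[datastore1] devstack-image-cache_base/cd84cf13-..." show the per-image cache lock: concurrent spawns that need the same base image serialize on a lock named after its cache path, and the SearchDatastore_Task calls inside the critical section check whether the cached VMDK already exists before anything is downloaded. A condensed sketch of that pattern (the helper bodies below are invented stand-ins):

from oslo_concurrency import lockutils

IMAGE_ID = "cd84cf13-77b9-4bc1-bb15-31bece605a8e"  # image id from the log
CACHE_LOCK = "[datastore1] devstack-image-cache_base/" + IMAGE_ID

def cached_image_exists():
    # Stand-in for the HostDatastoreBrowser.SearchDatastore_Task round-trip.
    return False

def fetch_image_to_cache():
    print("fetching %s into the datastore cache exactly once" % IMAGE_ID)

# The 'Acquired lock' / 'Releasing lock' DEBUG lines bracket this critical
# section; a second worker needing the same image blocks here instead of
# starting a duplicate download.
with lockutils.lock(CACHE_LOCK):
    if not cached_image_exists():
        fetch_image_to_cache()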
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.378442] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c66371-4bca-4fa6-9414-20d2019a3408 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.386049] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04acdbca-63d8-4d35-9bf1-50e9cc096e15 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.415451] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925cbf12-38ae-4e35-9f7a-011f2697cd97 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.422551] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d192c47-e8a7-4d48-bde7-f0d43bc681d1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.436563] env[62923]: DEBUG nova.compute.provider_tree [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.522967] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Acquiring lock "996cb68a-4a18-488d-890f-ace24dcd4c42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.523339] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Lock "996cb68a-4a18-488d-890f-ace24dcd4c42" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.523479] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Acquiring lock "996cb68a-4a18-488d-890f-ace24dcd4c42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.523664] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Lock "996cb68a-4a18-488d-890f-ace24dcd4c42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.523831] env[62923]: DEBUG 
oslo_concurrency.lockutils [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Lock "996cb68a-4a18-488d-890f-ace24dcd4c42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.526343] env[62923]: INFO nova.compute.manager [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Terminating instance [ 1051.528146] env[62923]: DEBUG nova.compute.manager [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1051.528349] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1051.529221] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ee35c0-a513-4c3b-a308-6896434cafa0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.536998] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.537521] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b485318-6ae8-44e3-acbd-0fb342f49a86 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.543193] env[62923]: DEBUG oslo_vmware.api [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Waiting for the task: (returnval){ [ 1051.543193] env[62923]: value = "task-1370468" [ 1051.543193] env[62923]: _type = "Task" [ 1051.543193] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.550357] env[62923]: DEBUG oslo_vmware.api [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370468, 'name': PowerOffVM_Task} progress is 0%. 
[ 1051.631950] env[62923]: DEBUG nova.compute.manager [req-2c7c5250-a3e5-4730-afdf-3efdf7ab6f39 req-528e2ebc-80a0-42b3-ad70-0cd69f065096 service nova] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Received event network-vif-deleted-70dafc2e-d2a9-49fa-ac00-d46b002927bf {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1051.687841] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1051.706722] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52db0cd2-8416-4db7-f5fa-61a7c3a524f5, 'name': SearchDatastore_Task, 'duration_secs': 0.008926} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1051.707602] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1359d355-5843-4cdd-8b97-2bf5b9878863 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1051.712642] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1051.712642] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]529ef9a3-e680-1be7-de1f-d5bc30015f51"
[ 1051.712642] env[62923]: _type = "Task"
[ 1051.712642] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1051.722566] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]529ef9a3-e680-1be7-de1f-d5bc30015f51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1051.940096] env[62923]: DEBUG nova.scheduler.client.report [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1052.053138] env[62923]: DEBUG oslo_vmware.api [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370468, 'name': PowerOffVM_Task, 'duration_secs': 0.240886} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1052.053427] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1052.053601] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1052.053849] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abc8daa4-946d-49b2-86b9-ffcff07084e2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1052.112677] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1052.113013] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1052.113054] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Deleting the datastore file [datastore2] 996cb68a-4a18-488d-890f-ace24dcd4c42 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1052.113299] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56644871-7676-4d7e-998f-0b9ca86830ac {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1052.120240] env[62923]: DEBUG oslo_vmware.api [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Waiting for the task: (returnval){
[ 1052.120240] env[62923]: value = "task-1370470"
[ 1052.120240] env[62923]: _type = "Task"
[ 1052.120240] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1052.127859] env[62923]: DEBUG oslo_vmware.api [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370470, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1052.157024] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1052.157265] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1052.157504] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Starting heal instance info cache {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}}
[ 1052.223609] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]529ef9a3-e680-1be7-de1f-d5bc30015f51, 'name': SearchDatastore_Task, 'duration_secs': 0.016683} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1052.223947] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1052.224311] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] b459a438-c287-4fbd-80f5-b5d3c31b83c9/b459a438-c287-4fbd-80f5-b5d3c31b83c9.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 1052.224639] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0027a1a4-8f6c-4e95-8d9e-c2bdca2ff344 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1052.231672] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1052.231672] env[62923]: value = "task-1370471"
[ 1052.231672] env[62923]: _type = "Task"
[ 1052.231672] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1052.239240] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370471, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1052.445261] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.187s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1052.445863] env[62923]: DEBUG nova.compute.manager [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 1052.448982] env[62923]: DEBUG oslo_concurrency.lockutils [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.357s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1052.449266] env[62923]: DEBUG nova.objects.instance [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lazy-loading 'resources' on Instance uuid 62889af3-06e9-4f5e-9ab0-87024e0678ca {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1052.630772] env[62923]: DEBUG oslo_vmware.api [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Task: {'id': task-1370470, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150073} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1052.631053] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1052.631250] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1052.631436] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1052.631616] env[62923]: INFO nova.compute.manager [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Took 1.10 seconds to destroy the instance on the hypervisor.
[ 1052.631870] env[62923]: DEBUG oslo.service.loopingcall [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1052.632086] env[62923]: DEBUG nova.compute.manager [-] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1052.632182] env[62923]: DEBUG nova.network.neutron [-] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1052.741364] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370471, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458051} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1052.741634] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] b459a438-c287-4fbd-80f5-b5d3c31b83c9/b459a438-c287-4fbd-80f5-b5d3c31b83c9.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 1052.741847] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 1052.742112] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a900a1e4-b328-4c0d-b84b-d6a2745f6dff {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1052.748230] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1052.748230] env[62923]: value = "task-1370472"
[ 1052.748230] env[62923]: _type = "Task"
[ 1052.748230] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1052.754875] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370472, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1052.953020] env[62923]: DEBUG nova.compute.utils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1052.957036] env[62923]: DEBUG nova.compute.manager [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 1052.957208] env[62923]: DEBUG nova.network.neutron [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 1052.995702] env[62923]: DEBUG nova.policy [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '182e1b6f26ed401da24d07a85f993802', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '011a5ec25af44f92961be00f82c10c08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}}
[ 1053.066662] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06cb6f0-e7da-4507-9148-f1494e0ddcab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1053.074053] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be3740c5-3fbd-4656-b504-7a9ef08321ac {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1053.103298] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f84b4d-29bc-4d31-968b-17a016b8fc24 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1053.111061] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f86ea77-3984-423b-a165-627bee1f827b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1053.124361] env[62923]: DEBUG nova.compute.provider_tree [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1053.263019] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370472, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062569} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1053.263019] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 1053.263019] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159a52ce-3561-4f46-8898-7a92aff142c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1053.285124] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] b459a438-c287-4fbd-80f5-b5d3c31b83c9/b459a438-c287-4fbd-80f5-b5d3c31b83c9.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1053.286034] env[62923]: DEBUG nova.network.neutron [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Successfully created port: 9f0373d6-6c37-4438-8d48-8aa143026856 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1053.287934] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-144babf8-df38-4761-af5d-8f658d4279cd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1053.308151] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1053.308151] env[62923]: value = "task-1370473"
[ 1053.308151] env[62923]: _type = "Task"
[ 1053.308151] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1053.316732] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370473, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1053.365905] env[62923]: DEBUG nova.network.neutron [-] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1053.458296] env[62923]: DEBUG nova.compute.manager [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 1053.628218] env[62923]: DEBUG nova.scheduler.client.report [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1053.682231] env[62923]: DEBUG nova.compute.manager [req-56962d7c-4cc3-4252-b139-c7be1badeeab req-89f5aef8-0f8b-412a-844d-cdf331668c0c service nova] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Received event network-vif-deleted-37d34472-7599-4430-aa04-02edec45a5ab {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1053.692537] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1053.692682] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquired lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1053.692824] env[62923]: DEBUG nova.network.neutron [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Forcefully refreshing network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}}
[ 1053.819157] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370473, 'name': ReconfigVM_Task, 'duration_secs': 0.303993} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1053.819459] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Reconfigured VM instance instance-00000061 to attach disk [datastore1] b459a438-c287-4fbd-80f5-b5d3c31b83c9/b459a438-c287-4fbd-80f5-b5d3c31b83c9.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1053.820582] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_format': None, 'size': 0, 'encryption_secret_uuid': None, 'device_type': 'disk', 'guest_format': None, 'encrypted': False, 'boot_index': 0, 'encryption_options': None, 'device_name': '/dev/sda', 'disk_bus': None, 'image_id': 'cd84cf13-77b9-4bc1-bb15-31bece605a8e'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291531', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'name': 'volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b459a438-c287-4fbd-80f5-b5d3c31b83c9', 'attached_at': '', 'detached_at': '', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'serial': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626'}, 'mount_device': '/dev/sdb', 'attachment_id': '4411eb06-37d2-4fe5-ad9d-563bbb0df075', 'guest_format': None, 'disk_bus': None, 'delete_on_termination': False, 'boot_index': None, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=62923) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}}
[ 1053.820792] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Volume attach. Driver type: vmdk {{(pid=62923) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}}
[ 1053.820988] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291531', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'name': 'volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b459a438-c287-4fbd-80f5-b5d3c31b83c9', 'attached_at': '', 'detached_at': '', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'serial': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}}
[ 1053.821762] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6866857f-454f-463e-ad8f-b22a957f4d8f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1053.836944] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe31bd0-ae21-4f40-b526-cd92ec047135 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1053.861895] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626/volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1053.862212] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb680217-e9de-4f88-8cf2-7f8400e3f3f9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1053.875828] env[62923]: INFO nova.compute.manager [-] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Took 1.24 seconds to deallocate network for instance.
[ 1053.886142] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1053.886142] env[62923]: value = "task-1370474"
[ 1053.886142] env[62923]: _type = "Task"
[ 1053.886142] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1053.893704] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370474, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1054.135243] env[62923]: DEBUG oslo_concurrency.lockutils [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.685s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1054.137646] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.023s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1054.137931] env[62923]: DEBUG nova.objects.instance [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lazy-loading 'resources' on Instance uuid 19e75201-8918-4b27-928b-633849222daf {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1054.158267] env[62923]: INFO nova.scheduler.client.report [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Deleted allocations for instance 62889af3-06e9-4f5e-9ab0-87024e0678ca
[ 1054.382230] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1054.395513] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370474, 'name': ReconfigVM_Task, 'duration_secs': 0.327052} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1054.395799] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Reconfigured VM instance instance-00000061 to attach disk [datastore2] volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626/volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1054.400424] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8848a4f8-57b6-4574-9081-415152539f45 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1054.415512] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1054.415512] env[62923]: value = "task-1370475"
[ 1054.415512] env[62923]: _type = "Task"
[ 1054.415512] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1054.423623] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370475, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1054.468560] env[62923]: DEBUG nova.compute.manager [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 1054.493338] env[62923]: DEBUG nova.virt.hardware [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=<?>,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-29T11:59:43Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1054.493656] env[62923]: DEBUG nova.virt.hardware [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1054.493856] env[62923]: DEBUG nova.virt.hardware [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1054.494157] env[62923]: DEBUG nova.virt.hardware [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1054.494404] env[62923]: DEBUG nova.virt.hardware [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1054.494644] env[62923]: DEBUG nova.virt.hardware [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1054.494949] env[62923]: DEBUG nova.virt.hardware [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1054.495219] env[62923]: DEBUG nova.virt.hardware [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1054.495483] env[62923]: DEBUG nova.virt.hardware [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1054.495736] env[62923]: DEBUG nova.virt.hardware [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1054.496015] env[62923]: DEBUG nova.virt.hardware [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1054.496999] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e980c8aa-5637-4380-b4aa-72e5698802b8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1054.505715] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5db9fd-fee8-4b40-8763-395cc1775f87 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1054.666942] env[62923]: DEBUG oslo_concurrency.lockutils [None req-df96afaa-74fc-4612-81ed-b526c5d684f3 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "62889af3-06e9-4f5e-9ab0-87024e0678ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.540s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1054.752010] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f7de2f-e22a-4cad-8c87-5f227481d32e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1054.759427] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c17570-bb0b-4460-b26b-1e4b38b25d3e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1054.790580] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805d3a26-3474-417c-9abb-88b48c7d24e2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1054.799163] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84cad2f-eab7-4add-86f9-4e613c51e211 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1054.809213] env[62923]: DEBUG nova.network.neutron [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Successfully updated port: 9f0373d6-6c37-4438-8d48-8aa143026856 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1054.821465] env[62923]: DEBUG nova.compute.provider_tree [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1054.927023] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370475, 'name': ReconfigVM_Task, 'duration_secs': 0.132102} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1054.927349] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291531', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'name': 'volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b459a438-c287-4fbd-80f5-b5d3c31b83c9', 'attached_at': '', 'detached_at': '', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'serial': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}}
[ 1054.927942] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c1404fa-0e7b-4cab-9f7a-bdd8ac6e742f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1054.934012] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1054.934012] env[62923]: value = "task-1370476"
[ 1054.934012] env[62923]: _type = "Task"
[ 1054.934012] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1054.941338] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370476, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1054.954206] env[62923]: DEBUG nova.network.neutron [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Updating instance_info_cache with network_info: [{"id": "e5f5c80e-b51d-4788-a346-d4ff5982fa57", "address": "fa:16:3e:c8:44:3f", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5f5c80e-b5", "ovs_interfaceid": "e5f5c80e-b51d-4788-a346-d4ff5982fa57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1055.312207] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1055.312463] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1055.312525] env[62923]: DEBUG nova.network.neutron [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1055.324374] env[62923]: DEBUG nova.scheduler.client.report [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1055.443616] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370476, 'name': Rename_Task, 'duration_secs': 0.136883} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1055.443875] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1055.444134] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a9faea12-a7ab-43c3-86ed-31b3ac6c8748 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1055.449564] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1055.449564] env[62923]: value = "task-1370477"
[ 1055.449564] env[62923]: _type = "Task"
[ 1055.449564] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1055.456595] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Releasing lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1055.456742] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Updated the network info_cache for instance {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}}
[ 1055.456947] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370477, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1055.457148] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1055.457306] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1055.457450] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1055.457590] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1055.457725] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1055.457865] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1055.457987] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62923) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}
[ 1055.458139] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1055.708733] env[62923]: DEBUG nova.compute.manager [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Received event network-vif-plugged-9f0373d6-6c37-4438-8d48-8aa143026856 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1055.708956] env[62923]: DEBUG oslo_concurrency.lockutils [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] Acquiring lock "222b944d-c58e-476e-b723-fc2b6990120a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1055.709187] env[62923]: DEBUG oslo_concurrency.lockutils [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] Lock "222b944d-c58e-476e-b723-fc2b6990120a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1055.709359] env[62923]: DEBUG oslo_concurrency.lockutils [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] Lock "222b944d-c58e-476e-b723-fc2b6990120a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1055.709529] env[62923]: DEBUG nova.compute.manager [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] No waiting events found dispatching network-vif-plugged-9f0373d6-6c37-4438-8d48-8aa143026856 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1055.709697] env[62923]: WARNING nova.compute.manager [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Received unexpected event network-vif-plugged-9f0373d6-6c37-4438-8d48-8aa143026856 for instance with vm_state building and task_state spawning.
[ 1055.709861] env[62923]: DEBUG nova.compute.manager [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Received event network-changed-9f0373d6-6c37-4438-8d48-8aa143026856 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1055.710029] env[62923]: DEBUG nova.compute.manager [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Refreshing instance network info cache due to event network-changed-9f0373d6-6c37-4438-8d48-8aa143026856. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1055.710202] env[62923]: DEBUG oslo_concurrency.lockutils [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] Acquiring lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1055.828736] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.691s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.830955] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.143s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.831942] env[62923]: DEBUG nova.objects.instance [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'resources' on Instance uuid 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.845104] env[62923]: DEBUG nova.network.neutron [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1055.849823] env[62923]: INFO nova.scheduler.client.report [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Deleted allocations for instance 19e75201-8918-4b27-928b-633849222daf [ 1055.960996] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.961307] env[62923]: DEBUG oslo_vmware.api [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370477, 'name': PowerOnVM_Task, 'duration_secs': 0.492269} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.961556] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1055.961762] env[62923]: DEBUG nova.compute.manager [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1055.962547] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fcff1c-4790-4bfd-bdd0-60e1606c8d77 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.022822] env[62923]: DEBUG nova.network.neutron [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance_info_cache with network_info: [{"id": "9f0373d6-6c37-4438-8d48-8aa143026856", "address": "fa:16:3e:5e:2b:a4", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0373d6-6c", "ovs_interfaceid": "9f0373d6-6c37-4438-8d48-8aa143026856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.357541] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ca4ba67e-103c-4ebe-b391-55d7fe5676c8 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "19e75201-8918-4b27-928b-633849222daf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.164s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.435449] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4600f2f3-1042-48ac-9dd8-42e0b863ef57 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.443185] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbafd0aa-f631-4988-a5f3-e97eb2d9d6dd 
{{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.478113] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7854bac7-019a-4766-a0bc-4d129ddfcc66 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.481758] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.486438] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe03f4ef-0988-483e-83c8-ff4af7ea253f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.498906] env[62923]: DEBUG nova.compute.provider_tree [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.525099] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.525404] env[62923]: DEBUG nova.compute.manager [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Instance network_info: |[{"id": "9f0373d6-6c37-4438-8d48-8aa143026856", "address": "fa:16:3e:5e:2b:a4", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0373d6-6c", "ovs_interfaceid": "9f0373d6-6c37-4438-8d48-8aa143026856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1056.525692] env[62923]: DEBUG oslo_concurrency.lockutils [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] Acquired lock 
"refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.525874] env[62923]: DEBUG nova.network.neutron [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Refreshing network info cache for port 9f0373d6-6c37-4438-8d48-8aa143026856 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1056.527024] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:2b:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4c7a041-8e34-47f9-8ea1-d2f29414fd9d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f0373d6-6c37-4438-8d48-8aa143026856', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1056.534327] env[62923]: DEBUG oslo.service.loopingcall [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1056.535175] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1056.535374] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5518e7c-b643-46a1-a3c9-32c74231196a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.554788] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1056.554788] env[62923]: value = "task-1370478" [ 1056.554788] env[62923]: _type = "Task" [ 1056.554788] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.563665] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370478, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.002122] env[62923]: DEBUG nova.scheduler.client.report [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1057.063731] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370478, 'name': CreateVM_Task, 'duration_secs': 0.329241} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.063862] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1057.064541] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.064715] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.065038] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1057.065286] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ac79d11-a037-444a-b8ad-c7907fbc156e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.069821] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1057.069821] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5281c960-c7bd-d77d-2e66-527d1a405395" [ 1057.069821] env[62923]: _type = "Task" [ 1057.069821] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.079136] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5281c960-c7bd-d77d-2e66-527d1a405395, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.325778] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "fc9ed87d-147a-47c0-b37e-720f20132b17" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.326089] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "fc9ed87d-147a-47c0-b37e-720f20132b17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.330197] env[62923]: DEBUG nova.network.neutron [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updated VIF entry in instance network info cache for port 9f0373d6-6c37-4438-8d48-8aa143026856. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1057.330518] env[62923]: DEBUG nova.network.neutron [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance_info_cache with network_info: [{"id": "9f0373d6-6c37-4438-8d48-8aa143026856", "address": "fa:16:3e:5e:2b:a4", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0373d6-6c", "ovs_interfaceid": "9f0373d6-6c37-4438-8d48-8aa143026856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.357137] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "cb87a7b0-5a88-4b58-aea3-ce998cf579b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.357373] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 
tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "cb87a7b0-5a88-4b58-aea3-ce998cf579b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.506940] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.676s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.509466] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.127s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.510056] env[62923]: DEBUG nova.objects.instance [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Lazy-loading 'resources' on Instance uuid 996cb68a-4a18-488d-890f-ace24dcd4c42 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.526204] env[62923]: INFO nova.scheduler.client.report [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Deleted allocations for instance 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7 [ 1057.580444] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5281c960-c7bd-d77d-2e66-527d1a405395, 'name': SearchDatastore_Task, 'duration_secs': 0.008862} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.580736] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.580970] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1057.581222] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.581375] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.581559] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1057.581819] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2562a8c3-a0f4-4360-b71a-33dc5f688643 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.589642] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1057.589823] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1057.590527] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d42fd36-08d2-4b96-a97e-055a90fdc4bd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.595159] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1057.595159] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52801e39-df5e-38b3-2d81-f959cc2f7876" [ 1057.595159] env[62923]: _type = "Task" [ 1057.595159] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.602669] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52801e39-df5e-38b3-2d81-f959cc2f7876, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.833036] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1057.835565] env[62923]: DEBUG oslo_concurrency.lockutils [req-b55a288e-4283-479c-8793-622169b74055 req-46820a9a-50d8-4007-9d74-e931485d0c28 service nova] Releasing lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.859494] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1058.033113] env[62923]: DEBUG oslo_concurrency.lockutils [None req-dc494438-142d-4ef0-a2a8-7a82395ee3ed tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.420s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.107914] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52801e39-df5e-38b3-2d81-f959cc2f7876, 'name': SearchDatastore_Task, 'duration_secs': 0.008069} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.108718] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4aa557b1-d1e2-4cb9-b661-c66172924020 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.116559] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1058.116559] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c33e45-e8b8-5d75-00f8-818648ee6d5e" [ 1058.116559] env[62923]: _type = "Task" [ 1058.116559] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.120611] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.120611] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.123557] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b50c3a1-a509-4acb-8fe4-dff4487671f5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.128875] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c33e45-e8b8-5d75-00f8-818648ee6d5e, 'name': SearchDatastore_Task, 'duration_secs': 0.00877} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.130048] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.130625] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 222b944d-c58e-476e-b723-fc2b6990120a/222b944d-c58e-476e-b723-fc2b6990120a.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1058.130625] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-025dc8da-7552-4050-ae82-2df54a128909 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.135593] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1852da1-ad0d-4d68-b371-c684e0f27ab0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.139648] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1058.139648] env[62923]: value = "task-1370479" [ 1058.139648] env[62923]: _type = "Task" [ 1058.139648] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.169836] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45955591-80cd-4b8e-932a-320d05baf2d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.172445] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370479, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.178550] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ed0c10-5271-4481-a0a8-2cc419ed55d6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.192257] env[62923]: DEBUG nova.compute.provider_tree [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1058.357257] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.380562] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.622907] env[62923]: INFO nova.compute.manager [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Detaching volume 46d4817c-b1e2-4f6b-a75d-5b24a3acd626 [ 1058.656663] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370479, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462183} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.656880] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] 222b944d-c58e-476e-b723-fc2b6990120a/222b944d-c58e-476e-b723-fc2b6990120a.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1058.657497] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1058.657497] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc00386e-60af-4544-baa7-5a1ff937173a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.664084] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1058.664084] env[62923]: value = "task-1370480" [ 1058.664084] env[62923]: _type = "Task" [ 1058.664084] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.672817] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370480, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.673937] env[62923]: INFO nova.virt.block_device [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Attempting to driver detach volume 46d4817c-b1e2-4f6b-a75d-5b24a3acd626 from mountpoint /dev/sdb [ 1058.674171] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Volume detach. 
Driver type: vmdk {{(pid=62923) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1058.674419] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291531', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'name': 'volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b459a438-c287-4fbd-80f5-b5d3c31b83c9', 'attached_at': '', 'detached_at': '', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'serial': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1058.675225] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f839446-fa74-4aff-b7df-9de594f4b3d4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.701668] env[62923]: DEBUG nova.scheduler.client.report [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1058.705185] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cb3d5f-f85c-432b-b3ce-53a9d7901199 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.712309] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a82977-af82-41a0-ae9a-39363f0dd6ee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.733474] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9a1067-de83-4b9d-8975-0fe7d44e9e7e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.748596] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] The volume has not been displaced from its original location: [datastore2] volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626/volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626.vmdk. No consolidation needed. 
{{(pid=62923) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1058.753711] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Reconfiguring VM instance instance-00000061 to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1058.754014] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1255a1ee-0866-4f7d-84be-b4037431bb6d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.772424] env[62923]: DEBUG oslo_vmware.api [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1058.772424] env[62923]: value = "task-1370481" [ 1058.772424] env[62923]: _type = "Task" [ 1058.772424] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.780759] env[62923]: DEBUG oslo_vmware.api [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370481, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.174544] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370480, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06012} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.174773] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1059.175559] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e87a24-8b11-49bb-b1ba-b6aed16f89dd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.198694] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 222b944d-c58e-476e-b723-fc2b6990120a/222b944d-c58e-476e-b723-fc2b6990120a.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1059.199011] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0948a612-b99e-4cde-84b8-d3b0181a643d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.213604] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.704s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.215723] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.255s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.215887] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.216051] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62923) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1059.216430] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.735s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.216610] env[62923]: DEBUG nova.objects.instance [None req-0e252547-408a-44e1-ba07-b6748464ad1d 
tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62923) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1059.220325] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d959df33-84bc-48af-870e-b95163c90349 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.224971] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1059.224971] env[62923]: value = "task-1370482" [ 1059.224971] env[62923]: _type = "Task" [ 1059.224971] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.230607] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4e2eb7-d2a7-451d-9446-e7c711678a3c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.240862] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370482, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.242040] env[62923]: INFO nova.scheduler.client.report [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Deleted allocations for instance 996cb68a-4a18-488d-890f-ace24dcd4c42 [ 1059.254190] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cc99d7-9571-44d6-b502-692c2bd2f802 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.262335] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f908d8-8bb2-43f1-8c27-b83ed452c1d8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.293882] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180588MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62923) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1059.293997] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.303639] env[62923]: DEBUG oslo_vmware.api [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370481, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.735416] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370482, 'name': ReconfigVM_Task, 'duration_secs': 0.252029} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.735738] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 222b944d-c58e-476e-b723-fc2b6990120a/222b944d-c58e-476e-b723-fc2b6990120a.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1059.736572] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d67f7a4-c960-464f-ba37-57c21922a1e3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.742682] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1059.742682] env[62923]: value = "task-1370483" [ 1059.742682] env[62923]: _type = "Task" [ 1059.742682] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.751917] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370483, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.761015] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6d1f295b-d275-44cd-8480-07dcffa59842 tempest-ServersNegativeTestMultiTenantJSON-1731198954 tempest-ServersNegativeTestMultiTenantJSON-1731198954-project-member] Lock "996cb68a-4a18-488d-890f-ace24dcd4c42" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.238s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.806298] env[62923]: DEBUG oslo_vmware.api [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370481, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.831957] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.832301] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.225349] env[62923]: DEBUG oslo_concurrency.lockutils [None req-0e252547-408a-44e1-ba07-b6748464ad1d tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.226684] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.869s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.228021] env[62923]: INFO nova.compute.claims [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1060.253275] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370483, 'name': Rename_Task, 'duration_secs': 0.135062} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.253559] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1060.253811] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04aad928-f8b3-4f4a-b2c3-60869ab3d240 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.260416] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1060.260416] env[62923]: value = "task-1370484" [ 1060.260416] env[62923]: _type = "Task" [ 1060.260416] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.269998] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370484, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.304159] env[62923]: DEBUG oslo_vmware.api [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370481, 'name': ReconfigVM_Task, 'duration_secs': 1.364982} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.304516] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Reconfigured VM instance instance-00000061 to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1060.310394] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b58523df-181c-40fa-94a7-c96a0c9c7b41 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.327114] env[62923]: DEBUG oslo_vmware.api [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1060.327114] env[62923]: value = "task-1370485" [ 1060.327114] env[62923]: _type = "Task" [ 1060.327114] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.335558] env[62923]: DEBUG nova.compute.manager [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1060.337652] env[62923]: DEBUG oslo_vmware.api [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370485, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.770866] env[62923]: DEBUG oslo_vmware.api [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370484, 'name': PowerOnVM_Task, 'duration_secs': 0.433124} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.771159] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1060.771364] env[62923]: INFO nova.compute.manager [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Took 6.30 seconds to spawn the instance on the hypervisor. [ 1060.771575] env[62923]: DEBUG nova.compute.manager [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1060.772366] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a5a08a-0022-41a2-a8ce-3882b6b5def1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.836852] env[62923]: DEBUG oslo_vmware.api [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370485, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.858042] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.290233] env[62923]: INFO nova.compute.manager [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Took 11.05 seconds to build instance. [ 1061.338789] env[62923]: DEBUG oslo_vmware.api [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370485, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.361260] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6623342d-ee99-4b85-9f57-0fc02bd9c829 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.370683] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898950ed-5629-4739-bc5f-72c5cdebd8ef {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.400209] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889547e4-d64b-4ed0-8042-2ca4b93a862a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.407488] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fbfef4-b8a4-4b04-ae89-a80cbac8d982 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.420200] env[62923]: DEBUG nova.compute.provider_tree [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.792681] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5e2d81f9-198a-4bcb-b508-21855812a8f9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "222b944d-c58e-476e-b723-fc2b6990120a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.563s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.845316] env[62923]: DEBUG oslo_vmware.api [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370485, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.923099] env[62923]: DEBUG nova.scheduler.client.report [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1062.339865] env[62923]: DEBUG oslo_vmware.api [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370485, 'name': ReconfigVM_Task, 'duration_secs': 1.918513} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.340898] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291531', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'name': 'volume-46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b459a438-c287-4fbd-80f5-b5d3c31b83c9', 'attached_at': '', 'detached_at': '', 'volume_id': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626', 'serial': '46d4817c-b1e2-4f6b-a75d-5b24a3acd626'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1062.416646] env[62923]: DEBUG nova.compute.manager [req-ec768a50-4d8d-47f6-bf48-3892a3e86b89 req-8d6bce12-6661-4dd6-8db1-a3c938a13f1f service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Received event network-changed-9f0373d6-6c37-4438-8d48-8aa143026856 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1062.416925] env[62923]: DEBUG nova.compute.manager [req-ec768a50-4d8d-47f6-bf48-3892a3e86b89 req-8d6bce12-6661-4dd6-8db1-a3c938a13f1f service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Refreshing instance network info cache due to event network-changed-9f0373d6-6c37-4438-8d48-8aa143026856. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1062.417446] env[62923]: DEBUG oslo_concurrency.lockutils [req-ec768a50-4d8d-47f6-bf48-3892a3e86b89 req-8d6bce12-6661-4dd6-8db1-a3c938a13f1f service nova] Acquiring lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.417704] env[62923]: DEBUG oslo_concurrency.lockutils [req-ec768a50-4d8d-47f6-bf48-3892a3e86b89 req-8d6bce12-6661-4dd6-8db1-a3c938a13f1f service nova] Acquired lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.417907] env[62923]: DEBUG nova.network.neutron [req-ec768a50-4d8d-47f6-bf48-3892a3e86b89 req-8d6bce12-6661-4dd6-8db1-a3c938a13f1f service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Refreshing network info cache for port 9f0373d6-6c37-4438-8d48-8aa143026856 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1062.429014] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.202s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.429663] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Start building networks asynchronously for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1062.434035] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.055s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.435523] env[62923]: INFO nova.compute.claims [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1062.890019] env[62923]: DEBUG nova.objects.instance [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lazy-loading 'flavor' on Instance uuid b459a438-c287-4fbd-80f5-b5d3c31b83c9 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1062.940226] env[62923]: DEBUG nova.compute.utils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1062.943556] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1062.943728] env[62923]: DEBUG nova.network.neutron [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1062.984507] env[62923]: DEBUG nova.policy [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '715bac2639c841dba876a5c1b74e6ade', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a7373f1d735a4f51a8524e0aa4b39b50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 1063.160255] env[62923]: DEBUG nova.network.neutron [req-ec768a50-4d8d-47f6-bf48-3892a3e86b89 req-8d6bce12-6661-4dd6-8db1-a3c938a13f1f service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updated VIF entry in instance network info cache for port 9f0373d6-6c37-4438-8d48-8aa143026856. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1063.160255] env[62923]: DEBUG nova.network.neutron [req-ec768a50-4d8d-47f6-bf48-3892a3e86b89 req-8d6bce12-6661-4dd6-8db1-a3c938a13f1f service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance_info_cache with network_info: [{"id": "9f0373d6-6c37-4438-8d48-8aa143026856", "address": "fa:16:3e:5e:2b:a4", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0373d6-6c", "ovs_interfaceid": "9f0373d6-6c37-4438-8d48-8aa143026856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.243785] env[62923]: DEBUG nova.network.neutron [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Successfully created port: e0cf2509-917a-467c-a0f2-4bcc7abb8bbf {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1063.444151] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1063.564860] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7c1382-6774-4aed-9ab3-eb7f7a257dc5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.572830] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5dd39b-3c3e-4869-b862-c379ed39b10b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.601933] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64fd608-0fa9-4aa9-b9b0-06621acbd99e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.608966] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ed8b3b-ed9c-431d-ac11-de4142ca0fd7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.623208] env[62923]: DEBUG nova.compute.provider_tree [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.662825] env[62923]: DEBUG oslo_concurrency.lockutils [req-ec768a50-4d8d-47f6-bf48-3892a3e86b89 req-8d6bce12-6661-4dd6-8db1-a3c938a13f1f service nova] Releasing lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.897287] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5813177e-d660-47ca-ae5d-cdd9ead80bd9 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 5.777s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.126233] env[62923]: DEBUG nova.scheduler.client.report [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1064.456592] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1064.456840] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.457075] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.457269] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.457484] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.459917] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1064.462282] env[62923]: INFO nova.compute.manager [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Terminating instance [ 1064.464267] env[62923]: DEBUG nova.compute.manager [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1064.464459] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1064.465290] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8343c2-5254-4431-897b-dd8427baf2a1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.473118] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.473361] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8049cc9d-d147-497c-a0a6-1e857f9ca95c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.480175] env[62923]: DEBUG oslo_vmware.api [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1064.480175] env[62923]: value = "task-1370486" [ 1064.480175] env[62923]: _type = "Task" [ 1064.480175] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.485623] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1064.485841] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1064.485998] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1064.486192] env[62923]: DEBUG nova.virt.hardware [None 
req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1064.487989] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1064.487989] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1064.487989] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1064.487989] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1064.487989] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1064.487989] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1064.487989] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1064.488603] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef552cd-ebf4-4064-b380-5a83c0152295 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.493393] env[62923]: DEBUG oslo_vmware.api [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370486, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.498138] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fdb1972-35a0-4e94-9725-1d052f22cfde {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.631170] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.198s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.631920] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1064.634998] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 5.341s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.653140] env[62923]: DEBUG nova.compute.manager [req-ff49e18b-faa2-4702-b035-7a692e25630e req-8261a99e-9e29-4fb6-800c-9b4d6cb9e143 service nova] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Received event network-vif-plugged-e0cf2509-917a-467c-a0f2-4bcc7abb8bbf {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1064.653140] env[62923]: DEBUG oslo_concurrency.lockutils [req-ff49e18b-faa2-4702-b035-7a692e25630e req-8261a99e-9e29-4fb6-800c-9b4d6cb9e143 service nova] Acquiring lock "fc9ed87d-147a-47c0-b37e-720f20132b17-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.653140] env[62923]: DEBUG oslo_concurrency.lockutils [req-ff49e18b-faa2-4702-b035-7a692e25630e req-8261a99e-9e29-4fb6-800c-9b4d6cb9e143 service nova] Lock "fc9ed87d-147a-47c0-b37e-720f20132b17-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.653140] env[62923]: DEBUG oslo_concurrency.lockutils [req-ff49e18b-faa2-4702-b035-7a692e25630e req-8261a99e-9e29-4fb6-800c-9b4d6cb9e143 service nova] Lock "fc9ed87d-147a-47c0-b37e-720f20132b17-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.654328] env[62923]: DEBUG nova.compute.manager [req-ff49e18b-faa2-4702-b035-7a692e25630e req-8261a99e-9e29-4fb6-800c-9b4d6cb9e143 service nova] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] No waiting events found dispatching network-vif-plugged-e0cf2509-917a-467c-a0f2-4bcc7abb8bbf {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1064.654702] env[62923]: WARNING nova.compute.manager 
[req-ff49e18b-faa2-4702-b035-7a692e25630e req-8261a99e-9e29-4fb6-800c-9b4d6cb9e143 service nova] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Received unexpected event network-vif-plugged-e0cf2509-917a-467c-a0f2-4bcc7abb8bbf for instance with vm_state building and task_state spawning. [ 1064.990719] env[62923]: DEBUG oslo_vmware.api [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370486, 'name': PowerOffVM_Task, 'duration_secs': 0.365323} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.991187] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1064.991187] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1064.991380] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ccae08d1-f397-4a65-9ca1-eb092e0e962b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.066428] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1065.066629] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1065.066820] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleting the datastore file [datastore1] b459a438-c287-4fbd-80f5-b5d3c31b83c9 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1065.067128] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f34079a3-f026-47fc-bf1d-adc7113d503d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.074578] env[62923]: DEBUG oslo_vmware.api [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1065.074578] env[62923]: value = "task-1370488" [ 1065.074578] env[62923]: _type = "Task" [ 1065.074578] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.084158] env[62923]: DEBUG oslo_vmware.api [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370488, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.137094] env[62923]: DEBUG nova.compute.utils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1065.138757] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1065.138969] env[62923]: DEBUG nova.network.neutron [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1065.217804] env[62923]: DEBUG nova.policy [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '715bac2639c841dba876a5c1b74e6ade', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a7373f1d735a4f51a8524e0aa4b39b50', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 1065.251742] env[62923]: DEBUG nova.network.neutron [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Successfully updated port: e0cf2509-917a-467c-a0f2-4bcc7abb8bbf {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1065.283327] env[62923]: DEBUG nova.compute.manager [req-de051f9a-eb0e-451a-90b8-3c539eee0ee2 req-28c4088b-028c-431c-b9d7-f361c6f91e9a service nova] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Received event network-changed-e0cf2509-917a-467c-a0f2-4bcc7abb8bbf {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1065.283327] env[62923]: DEBUG nova.compute.manager [req-de051f9a-eb0e-451a-90b8-3c539eee0ee2 req-28c4088b-028c-431c-b9d7-f361c6f91e9a service nova] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Refreshing instance network info cache due to event network-changed-e0cf2509-917a-467c-a0f2-4bcc7abb8bbf. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1065.283327] env[62923]: DEBUG oslo_concurrency.lockutils [req-de051f9a-eb0e-451a-90b8-3c539eee0ee2 req-28c4088b-028c-431c-b9d7-f361c6f91e9a service nova] Acquiring lock "refresh_cache-fc9ed87d-147a-47c0-b37e-720f20132b17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.283327] env[62923]: DEBUG oslo_concurrency.lockutils [req-de051f9a-eb0e-451a-90b8-3c539eee0ee2 req-28c4088b-028c-431c-b9d7-f361c6f91e9a service nova] Acquired lock "refresh_cache-fc9ed87d-147a-47c0-b37e-720f20132b17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.283327] env[62923]: DEBUG nova.network.neutron [req-de051f9a-eb0e-451a-90b8-3c539eee0ee2 req-28c4088b-028c-431c-b9d7-f361c6f91e9a service nova] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Refreshing network info cache for port e0cf2509-917a-467c-a0f2-4bcc7abb8bbf {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1065.473725] env[62923]: DEBUG nova.network.neutron [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Successfully created port: a84ec101-c21e-4c0b-b736-a05c336817e9 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1065.588408] env[62923]: DEBUG oslo_vmware.api [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370488, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157905} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.588801] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1065.589111] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1065.589412] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1065.589709] env[62923]: INFO nova.compute.manager [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1065.590100] env[62923]: DEBUG oslo.service.loopingcall [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1065.590394] env[62923]: DEBUG nova.compute.manager [-] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1065.590541] env[62923]: DEBUG nova.network.neutron [-] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1065.645969] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1065.672071] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 534fa654-ed73-4518-bdc7-d1f981628fd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.672211] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance b459a438-c287-4fbd-80f5-b5d3c31b83c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.672337] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 75f9473f-ca67-4bb5-8663-0ce3709885e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.672452] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance aae1a2a3-57da-4846-8240-ac0626e9ebd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.672562] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 222b944d-c58e-476e-b723-fc2b6990120a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.672675] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance fc9ed87d-147a-47c0-b37e-720f20132b17 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.672785] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance cb87a7b0-5a88-4b58-aea3-ce998cf579b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1065.754975] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "refresh_cache-fc9ed87d-147a-47c0-b37e-720f20132b17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1065.831476] env[62923]: DEBUG nova.network.neutron [req-de051f9a-eb0e-451a-90b8-3c539eee0ee2 req-28c4088b-028c-431c-b9d7-f361c6f91e9a service nova] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1066.143566] env[62923]: DEBUG nova.network.neutron [req-de051f9a-eb0e-451a-90b8-3c539eee0ee2 req-28c4088b-028c-431c-b9d7-f361c6f91e9a service nova] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.177199] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance bc86897e-85d6-46ea-bf66-2df7c6ed8fa0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1066.177496] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1066.177684] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1066.323948] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d83812c-cea8-46d6-8fa3-b68ca0e979b3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.332035] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e7dfc8-42d9-49b3-96c0-7f3c5d83b65c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.364130] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5308d3b-f264-4b71-8e68-5cd78e651d70 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.372316] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09007ac8-a36c-4426-b8d0-3d3869b88fe6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.387372] env[62923]: DEBUG nova.compute.provider_tree [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.647106] env[62923]: DEBUG oslo_concurrency.lockutils [req-de051f9a-eb0e-451a-90b8-3c539eee0ee2 req-28c4088b-028c-431c-b9d7-f361c6f91e9a service nova] Releasing lock "refresh_cache-fc9ed87d-147a-47c0-b37e-720f20132b17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1066.647585] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired lock "refresh_cache-fc9ed87d-147a-47c0-b37e-720f20132b17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.647791] env[62923]: DEBUG nova.network.neutron [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1066.662526] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1066.687027] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1066.687288] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1066.687525] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1066.687721] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1066.687874] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1066.688051] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1066.688264] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1066.688424] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1066.688591] env[62923]: DEBUG 
nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1066.688788] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1066.688977] env[62923]: DEBUG nova.virt.hardware [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1066.689926] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dba2e1d-5260-4732-ab92-adfa1cb088dd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.698607] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3025d73-dfa8-4c46-a9db-e12227c20d20 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.848349] env[62923]: DEBUG nova.compute.manager [req-ad71510e-336a-4024-ab82-fd9d57a46759 req-5dcafe82-2c56-428f-84a6-2acf59107a97 service nova] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Received event network-vif-plugged-a84ec101-c21e-4c0b-b736-a05c336817e9 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1066.848549] env[62923]: DEBUG oslo_concurrency.lockutils [req-ad71510e-336a-4024-ab82-fd9d57a46759 req-5dcafe82-2c56-428f-84a6-2acf59107a97 service nova] Acquiring lock "cb87a7b0-5a88-4b58-aea3-ce998cf579b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.848771] env[62923]: DEBUG oslo_concurrency.lockutils [req-ad71510e-336a-4024-ab82-fd9d57a46759 req-5dcafe82-2c56-428f-84a6-2acf59107a97 service nova] Lock "cb87a7b0-5a88-4b58-aea3-ce998cf579b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.848945] env[62923]: DEBUG oslo_concurrency.lockutils [req-ad71510e-336a-4024-ab82-fd9d57a46759 req-5dcafe82-2c56-428f-84a6-2acf59107a97 service nova] Lock "cb87a7b0-5a88-4b58-aea3-ce998cf579b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.849127] env[62923]: DEBUG nova.compute.manager [req-ad71510e-336a-4024-ab82-fd9d57a46759 req-5dcafe82-2c56-428f-84a6-2acf59107a97 service nova] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] No waiting events found dispatching network-vif-plugged-a84ec101-c21e-4c0b-b736-a05c336817e9 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1066.849294] env[62923]: WARNING nova.compute.manager 
[req-ad71510e-336a-4024-ab82-fd9d57a46759 req-5dcafe82-2c56-428f-84a6-2acf59107a97 service nova] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Received unexpected event network-vif-plugged-a84ec101-c21e-4c0b-b736-a05c336817e9 for instance with vm_state building and task_state spawning. [ 1066.868283] env[62923]: DEBUG nova.network.neutron [-] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.889965] env[62923]: DEBUG nova.scheduler.client.report [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1067.180833] env[62923]: DEBUG nova.network.neutron [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1067.314341] env[62923]: DEBUG nova.compute.manager [req-d69c27f6-2b37-4bd0-9d57-8f72b1ad4687 req-03499889-f0a1-481a-866d-2e25754e928e service nova] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Received event network-vif-deleted-35f893d5-3b23-4350-92a9-e3803a075eb0 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1067.315411] env[62923]: DEBUG nova.network.neutron [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Updating instance_info_cache with network_info: [{"id": "e0cf2509-917a-467c-a0f2-4bcc7abb8bbf", "address": "fa:16:3e:30:48:7f", "network": {"id": "9f2d90b9-a510-4eab-b512-ae6b4edab14e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1167790952-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7373f1d735a4f51a8524e0aa4b39b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0cf2509-91", "ovs_interfaceid": "e0cf2509-917a-467c-a0f2-4bcc7abb8bbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.370891] env[62923]: INFO 
nova.compute.manager [-] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Took 1.78 seconds to deallocate network for instance. [ 1067.395164] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1067.395355] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.760s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.395717] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.538s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.397379] env[62923]: INFO nova.compute.claims [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1067.406069] env[62923]: DEBUG nova.network.neutron [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Successfully updated port: a84ec101-c21e-4c0b-b736-a05c336817e9 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1067.429343] env[62923]: DEBUG nova.compute.manager [req-fbb1c706-092e-45aa-96c0-5f7d6d312413 req-f83d1c47-891d-42b2-82db-ad47db5a0ef9 service nova] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Received event network-changed-a84ec101-c21e-4c0b-b736-a05c336817e9 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1067.429647] env[62923]: DEBUG nova.compute.manager [req-fbb1c706-092e-45aa-96c0-5f7d6d312413 req-f83d1c47-891d-42b2-82db-ad47db5a0ef9 service nova] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Refreshing instance network info cache due to event network-changed-a84ec101-c21e-4c0b-b736-a05c336817e9. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1067.429873] env[62923]: DEBUG oslo_concurrency.lockutils [req-fbb1c706-092e-45aa-96c0-5f7d6d312413 req-f83d1c47-891d-42b2-82db-ad47db5a0ef9 service nova] Acquiring lock "refresh_cache-cb87a7b0-5a88-4b58-aea3-ce998cf579b6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.430130] env[62923]: DEBUG oslo_concurrency.lockutils [req-fbb1c706-092e-45aa-96c0-5f7d6d312413 req-f83d1c47-891d-42b2-82db-ad47db5a0ef9 service nova] Acquired lock "refresh_cache-cb87a7b0-5a88-4b58-aea3-ce998cf579b6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.430366] env[62923]: DEBUG nova.network.neutron [req-fbb1c706-092e-45aa-96c0-5f7d6d312413 req-f83d1c47-891d-42b2-82db-ad47db5a0ef9 service nova] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Refreshing network info cache for port a84ec101-c21e-4c0b-b736-a05c336817e9 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1067.818450] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Releasing lock "refresh_cache-fc9ed87d-147a-47c0-b37e-720f20132b17" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1067.818814] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Instance network_info: |[{"id": "e0cf2509-917a-467c-a0f2-4bcc7abb8bbf", "address": "fa:16:3e:30:48:7f", "network": {"id": "9f2d90b9-a510-4eab-b512-ae6b4edab14e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1167790952-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7373f1d735a4f51a8524e0aa4b39b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0cf2509-91", "ovs_interfaceid": "e0cf2509-917a-467c-a0f2-4bcc7abb8bbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1067.819289] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:48:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c47e98ff-83cf-48d2-bf91-2931c7386b6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'e0cf2509-917a-467c-a0f2-4bcc7abb8bbf', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1067.826750] env[62923]: DEBUG oslo.service.loopingcall [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1067.826955] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1067.827193] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-590bb5f4-f365-4355-8e49-015daeacb479 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.846279] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1067.846279] env[62923]: value = "task-1370489" [ 1067.846279] env[62923]: _type = "Task" [ 1067.846279] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.853502] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370489, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.877804] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.908683] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "refresh_cache-cb87a7b0-5a88-4b58-aea3-ce998cf579b6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.960522] env[62923]: DEBUG nova.network.neutron [req-fbb1c706-092e-45aa-96c0-5f7d6d312413 req-f83d1c47-891d-42b2-82db-ad47db5a0ef9 service nova] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1068.032032] env[62923]: DEBUG nova.network.neutron [req-fbb1c706-092e-45aa-96c0-5f7d6d312413 req-f83d1c47-891d-42b2-82db-ad47db5a0ef9 service nova] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.356118] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370489, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.512033] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05947c95-6c89-47e8-ae2d-7ec69fcaeda3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.519828] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003ab952-09d6-47d1-82b5-d6f535cc6dad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.549345] env[62923]: DEBUG oslo_concurrency.lockutils [req-fbb1c706-092e-45aa-96c0-5f7d6d312413 req-f83d1c47-891d-42b2-82db-ad47db5a0ef9 service nova] Releasing lock "refresh_cache-cb87a7b0-5a88-4b58-aea3-ce998cf579b6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1068.549907] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired lock "refresh_cache-cb87a7b0-5a88-4b58-aea3-ce998cf579b6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.550085] env[62923]: DEBUG nova.network.neutron [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1068.552164] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1428e4c9-d72c-456b-8e58-225e5710c4ed {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.561064] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20cb107a-c518-4e5f-89d0-b0711be75a99 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.575414] env[62923]: DEBUG nova.compute.provider_tree [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.856735] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370489, 'name': CreateVM_Task, 'duration_secs': 0.630995} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.856926] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1068.857638] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1068.857800] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.858129] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1068.858384] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8490573f-0ddf-4127-ae9c-b2b71525cca2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.863032] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1068.863032] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52175fbe-41a4-8212-4829-6a8bd971fb55" [ 1068.863032] env[62923]: _type = "Task" [ 1068.863032] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.870244] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52175fbe-41a4-8212-4829-6a8bd971fb55, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.078633] env[62923]: DEBUG nova.scheduler.client.report [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1069.083483] env[62923]: DEBUG nova.network.neutron [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1069.228864] env[62923]: DEBUG nova.network.neutron [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Updating instance_info_cache with network_info: [{"id": "a84ec101-c21e-4c0b-b736-a05c336817e9", "address": "fa:16:3e:f4:d0:90", "network": {"id": "9f2d90b9-a510-4eab-b512-ae6b4edab14e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1167790952-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7373f1d735a4f51a8524e0aa4b39b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa84ec101-c2", "ovs_interfaceid": "a84ec101-c21e-4c0b-b736-a05c336817e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.373664] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52175fbe-41a4-8212-4829-6a8bd971fb55, 'name': SearchDatastore_Task, 'duration_secs': 0.009358} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.374022] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.374219] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1069.374492] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.374657] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.374863] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1069.375143] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a400412-ae3d-4cb2-b152-123269845aa3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.382882] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1069.383073] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1069.383733] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f9115ee-9321-4248-9ba5-772ff2a0875e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.388457] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1069.388457] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5284cb33-5333-4ce7-d77a-b0bce0d4e27f" [ 1069.388457] env[62923]: _type = "Task" [ 1069.388457] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.395514] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5284cb33-5333-4ce7-d77a-b0bce0d4e27f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.525180] env[62923]: DEBUG oslo_concurrency.lockutils [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.525472] env[62923]: DEBUG oslo_concurrency.lockutils [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.583566] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.188s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.584173] env[62923]: DEBUG nova.compute.manager [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Start building networks asynchronously for instance.
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1069.587248] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.709s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.587478] env[62923]: DEBUG nova.objects.instance [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lazy-loading 'resources' on Instance uuid b459a438-c287-4fbd-80f5-b5d3c31b83c9 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.731996] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Releasing lock "refresh_cache-cb87a7b0-5a88-4b58-aea3-ce998cf579b6" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.732338] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Instance network_info: |[{"id": "a84ec101-c21e-4c0b-b736-a05c336817e9", "address": "fa:16:3e:f4:d0:90", "network": {"id": "9f2d90b9-a510-4eab-b512-ae6b4edab14e", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1167790952-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7373f1d735a4f51a8524e0aa4b39b50", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c47e98ff-83cf-48d2-bf91-2931c7386b6a", "external-id": "nsx-vlan-transportzone-992", "segmentation_id": 992, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa84ec101-c2", "ovs_interfaceid": "a84ec101-c21e-4c0b-b736-a05c336817e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1069.732777] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:d0:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c47e98ff-83cf-48d2-bf91-2931c7386b6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a84ec101-c21e-4c0b-b736-a05c336817e9', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1069.740276] env[62923]: DEBUG oslo.service.loopingcall [None req-9c3feba6-2caa-4282-a496-198941cab8b6 
tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1069.740489] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1069.740721] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e14b476-3ef5-4f67-8034-2a89439557f0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.761602] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1069.761602] env[62923]: value = "task-1370490" [ 1069.761602] env[62923]: _type = "Task" [ 1069.761602] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.769086] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370490, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.898550] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5284cb33-5333-4ce7-d77a-b0bce0d4e27f, 'name': SearchDatastore_Task, 'duration_secs': 0.007633} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.899455] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fee12cb-742e-4c2e-b57a-cbd2fe7d3ba4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.904426] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1069.904426] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5203a86f-1d8f-ea40-d273-f9beb3877684" [ 1069.904426] env[62923]: _type = "Task" [ 1069.904426] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.913251] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5203a86f-1d8f-ea40-d273-f9beb3877684, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.028623] env[62923]: INFO nova.compute.manager [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Detaching volume 7c87212b-d640-4716-ace2-030c4b7ed621 [ 1070.059724] env[62923]: INFO nova.virt.block_device [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Attempting to driver detach volume 7c87212b-d640-4716-ace2-030c4b7ed621 from mountpoint /dev/sdb [ 1070.059979] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Volume detach. Driver type: vmdk {{(pid=62923) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1070.060181] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291523', 'volume_id': '7c87212b-d640-4716-ace2-030c4b7ed621', 'name': 'volume-7c87212b-d640-4716-ace2-030c4b7ed621', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '75f9473f-ca67-4bb5-8663-0ce3709885e9', 'attached_at': '', 'detached_at': '', 'volume_id': '7c87212b-d640-4716-ace2-030c4b7ed621', 'serial': '7c87212b-d640-4716-ace2-030c4b7ed621'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1070.061073] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8b9027-6cee-489d-a5a9-b6e6f84cde06 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.081901] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4094fe-ddcb-45e6-8530-20c2dfddfdea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.088757] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51dc6772-3bba-4f08-a3c7-4c10c11d3955 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.092070] env[62923]: DEBUG nova.compute.utils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1070.095583] env[62923]: DEBUG nova.compute.manager [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1070.095754] env[62923]: DEBUG nova.network.neutron [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1070.119433] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1fa2ea5-cbf0-4ed5-959b-33763f92b9d4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.135336] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] The volume has not been displaced from its original location: [datastore2] volume-7c87212b-d640-4716-ace2-030c4b7ed621/volume-7c87212b-d640-4716-ace2-030c4b7ed621.vmdk. No consolidation needed. {{(pid=62923) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1070.140460] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Reconfiguring VM instance instance-00000057 to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1070.141824] env[62923]: DEBUG nova.policy [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd3732884d90b416597cfc499ebf82e53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a855374ba4624ee78230d07b85b2ab8b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 1070.145123] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-890ea809-2b3b-4eda-a2a7-7d212eb9646e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.163933] env[62923]: DEBUG oslo_vmware.api [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1070.163933] env[62923]: value = "task-1370491" [ 1070.163933] env[62923]: _type = "Task" [ 1070.163933] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.174244] env[62923]: DEBUG oslo_vmware.api [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370491, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.228747] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4cf9e7-1c9b-4a15-8e0f-2b88106b3e63 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.236358] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad32edff-8a96-45ba-9957-9c7694c29c02 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.270477] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b130a2fc-759e-43bd-975a-1a45943d6925 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.281121] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af361cb-d95e-4de1-bd01-a2aa719cc8a4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.284564] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370490, 'name': CreateVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.294535] env[62923]: DEBUG nova.compute.provider_tree [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.411886] env[62923]: DEBUG nova.network.neutron [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Successfully created port: dea689f8-5e91-490e-980b-8025533b5e90 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1070.420396] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5203a86f-1d8f-ea40-d273-f9beb3877684, 'name': SearchDatastore_Task, 'duration_secs': 0.010251} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.420695] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.421066] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] fc9ed87d-147a-47c0-b37e-720f20132b17/fc9ed87d-147a-47c0-b37e-720f20132b17.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1070.421344] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4c22321-72ac-45d0-accd-1ad1c9c5b363 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.427701] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1070.427701] env[62923]: value = "task-1370492" [ 1070.427701] env[62923]: _type = "Task" [ 1070.427701] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.436775] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370492, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.599537] env[62923]: DEBUG nova.compute.manager [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1070.674959] env[62923]: DEBUG oslo_vmware.api [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370491, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.777542] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370490, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.797040] env[62923]: DEBUG nova.scheduler.client.report [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1070.938728] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370492, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.175889] env[62923]: DEBUG oslo_vmware.api [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370491, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.281236] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370490, 'name': CreateVM_Task, 'duration_secs': 1.416558} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.281413] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1071.282191] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.282478] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.282907] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1071.283329] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2522f73-14e2-4e21-9a21-4fe73c333562 {{(pid=62923) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.288452] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1071.288452] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52aa4d88-093f-fdbf-8a58-f1cf92a34458" [ 1071.288452] env[62923]: _type = "Task" [ 1071.288452] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.297300] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52aa4d88-093f-fdbf-8a58-f1cf92a34458, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.301260] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.714s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.326575] env[62923]: INFO nova.scheduler.client.report [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleted allocations for instance b459a438-c287-4fbd-80f5-b5d3c31b83c9 [ 1071.440060] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370492, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.611663] env[62923]: DEBUG nova.compute.manager [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1071.636102] env[62923]: DEBUG nova.virt.hardware [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1071.636375] env[62923]: DEBUG nova.virt.hardware [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1071.636628] env[62923]: DEBUG nova.virt.hardware [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1071.636836] env[62923]: DEBUG nova.virt.hardware [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1071.636989] env[62923]: DEBUG nova.virt.hardware [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1071.637159] env[62923]: DEBUG nova.virt.hardware [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1071.637369] env[62923]: DEBUG nova.virt.hardware [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1071.637630] env[62923]: DEBUG nova.virt.hardware [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1071.637826] env[62923]: DEBUG nova.virt.hardware [None 
req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1071.637994] env[62923]: DEBUG nova.virt.hardware [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1071.638186] env[62923]: DEBUG nova.virt.hardware [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1071.639038] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8a4266-4dbd-4c32-b71a-8541e5f8999b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.647064] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47a35f1-f56e-4436-9ea7-b658a777371a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.672657] env[62923]: DEBUG oslo_vmware.api [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370491, 'name': ReconfigVM_Task, 'duration_secs': 1.34343} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.672914] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Reconfigured VM instance instance-00000057 to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1071.678580] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-feca0c47-0d3f-40a0-bb17-cf70aa10c3d2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.694638] env[62923]: DEBUG oslo_vmware.api [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1071.694638] env[62923]: value = "task-1370493" [ 1071.694638] env[62923]: _type = "Task" [ 1071.694638] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.705615] env[62923]: DEBUG oslo_vmware.api [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370493, 'name': ReconfigVM_Task} progress is 6%. 
[ 1071.767417] env[62923]: DEBUG oslo_concurrency.lockutils [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.768058] env[62923]: DEBUG oslo_concurrency.lockutils [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.798775] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52aa4d88-093f-fdbf-8a58-f1cf92a34458, 'name': SearchDatastore_Task, 'duration_secs': 0.051867} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.799089] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.799329] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1071.799563] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.799713] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.799892] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1071.800162]
env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b86dbdd-3848-4f6a-96d0-4055496ddee0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.808012] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1071.808218] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1071.809138] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af4aadd3-3197-439c-8f32-c0aef3e16b8f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.814117] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1071.814117] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]524dfd80-9c78-a093-993e-585a40ff4c42" [ 1071.814117] env[62923]: _type = "Task" [ 1071.814117] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.821488] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]524dfd80-9c78-a093-993e-585a40ff4c42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.835651] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5ff89972-b58a-49e4-9ad9-994d97b354b6 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b459a438-c287-4fbd-80f5-b5d3c31b83c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.379s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.939393] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370492, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.207555] env[62923]: DEBUG oslo_vmware.api [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370493, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.271149] env[62923]: DEBUG nova.compute.utils [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1072.323771] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]524dfd80-9c78-a093-993e-585a40ff4c42, 'name': SearchDatastore_Task, 'duration_secs': 0.00805} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.324604] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e3fdc4c-345b-4dd8-a102-144ca9af9a4f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.329584] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1072.329584] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b631ef-f378-c152-82a5-3d9fbe742ae5" [ 1072.329584] env[62923]: _type = "Task" [ 1072.329584] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.336963] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b631ef-f378-c152-82a5-3d9fbe742ae5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.348246] env[62923]: DEBUG nova.compute.manager [req-2f40f591-10a9-4566-93e2-07ed86ce9124 req-b7b00907-7df0-473a-be35-1ba322949598 service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Received event network-vif-plugged-dea689f8-5e91-490e-980b-8025533b5e90 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1072.348456] env[62923]: DEBUG oslo_concurrency.lockutils [req-2f40f591-10a9-4566-93e2-07ed86ce9124 req-b7b00907-7df0-473a-be35-1ba322949598 service nova] Acquiring lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.348677] env[62923]: DEBUG oslo_concurrency.lockutils [req-2f40f591-10a9-4566-93e2-07ed86ce9124 req-b7b00907-7df0-473a-be35-1ba322949598 service nova] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.348850] env[62923]: DEBUG oslo_concurrency.lockutils [req-2f40f591-10a9-4566-93e2-07ed86ce9124 req-b7b00907-7df0-473a-be35-1ba322949598 service nova] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.349024] env[62923]: DEBUG nova.compute.manager [req-2f40f591-10a9-4566-93e2-07ed86ce9124 req-b7b00907-7df0-473a-be35-1ba322949598 service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] No waiting events found dispatching network-vif-plugged-dea689f8-5e91-490e-980b-8025533b5e90 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1072.349208] env[62923]: WARNING nova.compute.manager [req-2f40f591-10a9-4566-93e2-07ed86ce9124 req-b7b00907-7df0-473a-be35-1ba322949598 service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Received unexpected event network-vif-plugged-dea689f8-5e91-490e-980b-8025533b5e90 for instance with vm_state building and task_state spawning. [ 1072.440163] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370492, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.705292] env[62923]: DEBUG oslo_vmware.api [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370493, 'name': ReconfigVM_Task, 'duration_secs': 0.885491} completed successfully. 
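{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}

Most oslo_vmware.api entries in this section follow the same two-phase pattern: wait_for_task registers interest in a vCenter task, then _poll_task logs "progress is N%." until the task reports success ("completed successfully.") or error. A sketch of that loop under simplified assumptions; the real implementation is built on oslo.service looping calls, and get_task_info here is a hypothetical callable standing in for the PropertyCollector query:

import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, interval=0.5):
    # Illustrative poll loop mirroring the visible behaviour above:
    # log progress while running, return the result on success,
    # raise on error. `get_task_info` returns an object with
    # .state, .progress, .result and .error attributes (our names).
    while True:
        info = get_task_info()
        if info.state == "running":
            print(f"progress is {info.progress}%.")    # like _poll_task
        elif info.state == "success":
            print("completed successfully.")
            return info.result
        elif info.state == "error":
            raise TaskFailed(info.error)
        time.sleep(interval)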
[ 1072.705595] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291523', 'volume_id': '7c87212b-d640-4716-ace2-030c4b7ed621', 'name': 'volume-7c87212b-d640-4716-ace2-030c4b7ed621', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '75f9473f-ca67-4bb5-8663-0ce3709885e9', 'attached_at': '', 'detached_at': '', 'volume_id': '7c87212b-d640-4716-ace2-030c4b7ed621', 'serial': '7c87212b-d640-4716-ace2-030c4b7ed621'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1072.773772] env[62923]: DEBUG oslo_concurrency.lockutils [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.781982] env[62923]: DEBUG nova.network.neutron [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Successfully updated port: dea689f8-5e91-490e-980b-8025533b5e90 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1072.839508] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b631ef-f378-c152-82a5-3d9fbe742ae5, 'name': SearchDatastore_Task, 'duration_secs': 0.040744} completed successfully.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.839782] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.840090] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] cb87a7b0-5a88-4b58-aea3-ce998cf579b6/cb87a7b0-5a88-4b58-aea3-ce998cf579b6.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1072.840356] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e11aa9a-e290-46ac-87ab-ce7eaabb60fa {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.845831] env[62923]: DEBUG nova.compute.manager [req-5e755d77-32eb-4342-9089-8778dc56e75e req-449189a4-9e10-4c04-a920-2c6d23f070a9 service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Received event network-changed-dea689f8-5e91-490e-980b-8025533b5e90 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1072.846073] env[62923]: DEBUG nova.compute.manager [req-5e755d77-32eb-4342-9089-8778dc56e75e req-449189a4-9e10-4c04-a920-2c6d23f070a9 service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Refreshing instance network info cache due to event network-changed-dea689f8-5e91-490e-980b-8025533b5e90. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1072.846250] env[62923]: DEBUG oslo_concurrency.lockutils [req-5e755d77-32eb-4342-9089-8778dc56e75e req-449189a4-9e10-4c04-a920-2c6d23f070a9 service nova] Acquiring lock "refresh_cache-bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1072.846371] env[62923]: DEBUG oslo_concurrency.lockutils [req-5e755d77-32eb-4342-9089-8778dc56e75e req-449189a4-9e10-4c04-a920-2c6d23f070a9 service nova] Acquired lock "refresh_cache-bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.847023] env[62923]: DEBUG nova.network.neutron [req-5e755d77-32eb-4342-9089-8778dc56e75e req-449189a4-9e10-4c04-a920-2c6d23f070a9 service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Refreshing network info cache for port dea689f8-5e91-490e-980b-8025533b5e90 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1072.849238] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1072.849238] env[62923]: value = "task-1370494" [ 1072.849238] env[62923]: _type = "Task" [ 1072.849238] env[62923]: } to complete. 
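{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}

The Acquiring/Acquired/Releasing lines around the devstack-image-cache_base paths show oslo.concurrency named locks serializing access to the cached image while the CopyVirtualDisk_Task runs. A minimal sketch of the same pattern, assuming oslo.concurrency is installed; the path is copied from the log and the fetch/copy work is elided:

from oslo_concurrency import lockutils

# The datastore path doubles as the lock name, so concurrent builds that
# need the same cached image queue behind one another, which is what the
# "waited 0.000s" / "held N.NNNs" bookkeeping above records.
CACHE_VMDK = ("[datastore2] devstack-image-cache_base/"
              "cd84cf13-77b9-4bc1-bb15-31bece605a8e/"
              "cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk")

with lockutils.lock(CACHE_VMDK):
    pass  # fetch-if-missing / copy-to-instance-dir work happens here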
[ 1072.857773] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370494, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.942940] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370492, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.041996} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.943357] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] fc9ed87d-147a-47c0-b37e-720f20132b17/fc9ed87d-147a-47c0-b37e-720f20132b17.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1072.943667] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1072.943987] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f837d84-59aa-4413-9f7d-30247d79ff05 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.950631] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1072.950631] env[62923]: value = "task-1370495" [ 1072.950631] env[62923]: _type = "Task" [ 1072.950631] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.959305] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370495, 'name': ExtendVirtualDisk_Task} progress is 0%.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.254903] env[62923]: DEBUG nova.objects.instance [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lazy-loading 'flavor' on Instance uuid 75f9473f-ca67-4bb5-8663-0ce3709885e9 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.284999] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "refresh_cache-bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.361766] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370494, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.386775] env[62923]: DEBUG nova.network.neutron [req-5e755d77-32eb-4342-9089-8778dc56e75e req-449189a4-9e10-4c04-a920-2c6d23f070a9 service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1073.454170] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b826c4d1-3e31-49da-8e16-8e512599912c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.454602] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b826c4d1-3e31-49da-8e16-8e512599912c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.465483] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063353} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.466361] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1073.467237] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f548559-a25b-4003-98a6-826170e38242 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.490378] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] fc9ed87d-147a-47c0-b37e-720f20132b17/fc9ed87d-147a-47c0-b37e-720f20132b17.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1073.491388] env[62923]: DEBUG nova.network.neutron [req-5e755d77-32eb-4342-9089-8778dc56e75e req-449189a4-9e10-4c04-a920-2c6d23f070a9 service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.492650] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-952cdcf2-b43f-4bca-a777-9a035dad10cc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.507450] env[62923]: DEBUG oslo_concurrency.lockutils [req-5e755d77-32eb-4342-9089-8778dc56e75e req-449189a4-9e10-4c04-a920-2c6d23f070a9 service nova] Releasing lock "refresh_cache-bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1073.508055] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquired lock "refresh_cache-bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.508214] env[62923]: DEBUG nova.network.neutron [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1073.513726] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1073.513726] env[62923]: value = "task-1370496" [ 1073.513726] env[62923]: _type = "Task" [ 1073.513726] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.522266] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370496, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.847635] env[62923]: DEBUG oslo_concurrency.lockutils [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.847914] env[62923]: DEBUG oslo_concurrency.lockutils [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.848173] env[62923]: INFO nova.compute.manager [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Attaching volume 6f0382bb-6899-496a-a452-9cdc00d9a14f to /dev/sdb [ 1073.862616] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370494, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.655199} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.862869] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] cb87a7b0-5a88-4b58-aea3-ce998cf579b6/cb87a7b0-5a88-4b58-aea3-ce998cf579b6.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1073.863102] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1073.863354] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76648c81-f31a-49cc-ba58-ed16ee5b921c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.869191] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1073.869191] env[62923]: value = "task-1370497" [ 1073.869191] env[62923]: _type = "Task" [ 1073.869191] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.876510] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370497, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.881138] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b72c1f-0fa9-42d2-bfa5-cbeb0c6720b3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.888454] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04473738-694f-4a64-982a-4080fe07a76b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.901964] env[62923]: DEBUG nova.virt.block_device [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Updating existing volume attachment record: cffc8a2a-a4b1-4db2-90a5-a7fd29ba0daf {{(pid=62923) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1073.960486] env[62923]: DEBUG nova.compute.manager [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Starting instance... 
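{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}

The attach path above ("Attaching volume 6f0382bb-... to /dev/sdb", after get_next_device_name earlier fell back to the /dev/sd prefix) picks the first free device name for the new volume. A toy version of that selection, not Nova's actual implementation, which also handles trailing digits and per-hypervisor prefixes:

import string

def get_next_device_name(existing, prefix="/dev/sd"):
    # Take the first suffix letter not already in use; with the root disk
    # on /dev/sda this returns /dev/sdb, as in the log entry above.
    used = {name[len(prefix):] for name in existing
            if name.startswith(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in used:
            return prefix + letter
    raise ValueError("no free device names")

print(get_next_device_name(["/dev/sda"]))  # -> /dev/sdb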
[ 1074.022429] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370496, 'name': ReconfigVM_Task, 'duration_secs': 0.25611} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.022718] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Reconfigured VM instance instance-0000006a to attach disk [datastore2] fc9ed87d-147a-47c0-b37e-720f20132b17/fc9ed87d-147a-47c0-b37e-720f20132b17.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1074.023359] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d979e2a0-4850-4653-959e-5dbc07e253b5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.029120] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1074.029120] env[62923]: value = "task-1370498" [ 1074.029120] env[62923]: _type = "Task" [ 1074.029120] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.039532] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370498, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.039532] env[62923]: DEBUG nova.network.neutron [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Instance cache missing network info.
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1074.177261] env[62923]: DEBUG nova.network.neutron [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Updating instance_info_cache with network_info: [{"id": "dea689f8-5e91-490e-980b-8025533b5e90", "address": "fa:16:3e:22:16:a6", "network": {"id": "f9845a8d-f3e3-4080-8b11-bca02678b9c5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1137339094-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a855374ba4624ee78230d07b85b2ab8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdea689f8-5e", "ovs_interfaceid": "dea689f8-5e91-490e-980b-8025533b5e90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.263529] env[62923]: DEBUG oslo_concurrency.lockutils [None req-59a7d362-ebc3-4249-89bc-7ce336b816e2 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.738s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.379099] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370497, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07187} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.379449] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1074.380259] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1ebc06-f0bc-4221-a59d-a6282dd7c3a1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.403129] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] cb87a7b0-5a88-4b58-aea3-ce998cf579b6/cb87a7b0-5a88-4b58-aea3-ce998cf579b6.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1074.403452] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19d2da25-5ef9-40c0-a833-04eed04f434c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.424574] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1074.424574] env[62923]: value = "task-1370500" [ 1074.424574] env[62923]: _type = "Task" [ 1074.424574] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.433089] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370500, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.485429] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.485701] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.487429] env[62923]: INFO nova.compute.claims [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1074.538840] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370498, 'name': Rename_Task, 'duration_secs': 0.140132} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.539161] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1074.539423] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4821e2c5-5609-409e-a4c2-4a515ae0b657 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.545304] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1074.545304] env[62923]: value = "task-1370501" [ 1074.545304] env[62923]: _type = "Task" [ 1074.545304] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.552661] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370501, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.680503] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Releasing lock "refresh_cache-bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1074.680875] env[62923]: DEBUG nova.compute.manager [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Instance network_info: |[{"id": "dea689f8-5e91-490e-980b-8025533b5e90", "address": "fa:16:3e:22:16:a6", "network": {"id": "f9845a8d-f3e3-4080-8b11-bca02678b9c5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1137339094-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a855374ba4624ee78230d07b85b2ab8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdea689f8-5e", "ovs_interfaceid": "dea689f8-5e91-490e-980b-8025533b5e90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1074.681432] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:16:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1d25020-c621-4388-ac1d-de55bfefbe50', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dea689f8-5e91-490e-980b-8025533b5e90', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1074.689229] env[62923]: DEBUG oslo.service.loopingcall [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1074.689512] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1074.689800] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d893a6df-0e23-4de7-9691-a623b89ccbd2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.710246] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1074.710246] env[62923]: value = "task-1370502" [ 1074.710246] env[62923]: _type = "Task" [ 1074.710246] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.718137] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370502, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.934940] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370500, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.948541] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.948861] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.949148] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "75f9473f-ca67-4bb5-8663-0ce3709885e9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.949352] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.949527] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 
tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1074.951852] env[62923]: INFO nova.compute.manager [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Terminating instance [ 1074.953869] env[62923]: DEBUG nova.compute.manager [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1074.954083] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1074.955011] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eadac0f-d6d1-4ed3-b6d7-1db04c1a844f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.966767] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1074.967129] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71ef9d6d-a028-452a-b3bf-f99b49d845de {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.972860] env[62923]: DEBUG oslo_vmware.api [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1074.972860] env[62923]: value = "task-1370503" [ 1074.972860] env[62923]: _type = "Task" [ 1074.972860] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.982073] env[62923]: DEBUG oslo_vmware.api [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370503, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.056231] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370501, 'name': PowerOnVM_Task, 'duration_secs': 0.462906} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.056631] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1075.056903] env[62923]: INFO nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Took 10.60 seconds to spawn the instance on the hypervisor. [ 1075.057191] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1075.058237] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58ea4df-aaee-4032-8f50-a9eb51a3dd89 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.220674] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370502, 'name': CreateVM_Task, 'duration_secs': 0.453187} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.220820] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1075.221471] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1075.221642] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.221970] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1075.222244] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-186f223d-3634-4953-ba19-cb1e1add73e5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.226977] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for 
the task: (returnval){ [ 1075.226977] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528d760f-fa3b-30f2-a993-61b04e5cf353" [ 1075.226977] env[62923]: _type = "Task" [ 1075.226977] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.234555] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528d760f-fa3b-30f2-a993-61b04e5cf353, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.436701] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370500, 'name': ReconfigVM_Task, 'duration_secs': 0.685718} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.436991] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Reconfigured VM instance instance-0000006b to attach disk [datastore2] cb87a7b0-5a88-4b58-aea3-ce998cf579b6/cb87a7b0-5a88-4b58-aea3-ce998cf579b6.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1075.437685] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d7d5c749-564e-4d95-9a74-623f4623ea8a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.444258] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1075.444258] env[62923]: value = "task-1370504" [ 1075.444258] env[62923]: _type = "Task" [ 1075.444258] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.451732] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370504, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.482040] env[62923]: DEBUG oslo_vmware.api [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370503, 'name': PowerOffVM_Task, 'duration_secs': 0.199272} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.482371] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1075.482416] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1075.482654] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-340c79f4-93d5-4492-9434-3156292dc738 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.541090] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1075.541090] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1075.541362] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleting the datastore file [datastore2] 75f9473f-ca67-4bb5-8663-0ce3709885e9 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1075.541571] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01acc058-076c-41b6-979d-52d4a81f9d33 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.550013] env[62923]: DEBUG oslo_vmware.api [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1075.550013] env[62923]: value = "task-1370506" [ 1075.550013] env[62923]: _type = "Task" [ 1075.550013] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.558677] env[62923]: DEBUG oslo_vmware.api [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370506, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.578268] env[62923]: INFO nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Took 17.24 seconds to build instance. [ 1075.615412] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3869fd1-6a5d-4eca-b31d-f31848a9e923 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.623144] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a7cfd5-ad4c-4511-9e95-0dca7fc1899b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.654385] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488cb898-4862-4cf8-9dbe-90efe459d086 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.661747] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295ca61a-9103-4172-824e-19a2007cfacc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.674471] env[62923]: DEBUG nova.compute.provider_tree [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1075.737657] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]528d760f-fa3b-30f2-a993-61b04e5cf353, 'name': SearchDatastore_Task, 'duration_secs': 0.024253} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.737920] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1075.738171] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1075.738404] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1075.738552] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.738730] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1075.739013] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b51d027-07de-4b5c-8cee-6c5c709d6270 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.746681] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1075.746855] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1075.747802] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d64231d0-ecac-4fac-a243-994751910c83 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.752338] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1075.752338] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]520a7371-e252-5395-454e-c5f252da8342" [ 1075.752338] env[62923]: _type = "Task" [ 1075.752338] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.759785] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]520a7371-e252-5395-454e-c5f252da8342, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.954129] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370504, 'name': Rename_Task, 'duration_secs': 0.14417} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.954404] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1075.954673] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5c744d9-7e74-42ec-b455-71b5fb91a943 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.964867] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1075.964867] env[62923]: value = "task-1370507" [ 1075.964867] env[62923]: _type = "Task" [ 1075.964867] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.972551] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370507, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.062027] env[62923]: DEBUG oslo_vmware.api [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370506, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136289} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.062205] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1076.062466] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1076.062719] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1076.062967] env[62923]: INFO nova.compute.manager [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1076.063315] env[62923]: DEBUG oslo.service.loopingcall [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1076.063570] env[62923]: DEBUG nova.compute.manager [-] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1076.063701] env[62923]: DEBUG nova.network.neutron [-] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1076.081748] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "fc9ed87d-147a-47c0-b37e-720f20132b17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.755s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.196653] env[62923]: ERROR nova.scheduler.client.report [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [req-dcd650f8-7719-42c9-9d5c-3f3d26560e23] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dcd650f8-7719-42c9-9d5c-3f3d26560e23"}]} [ 1076.212380] env[62923]: DEBUG nova.scheduler.client.report [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1076.226967] env[62923]: DEBUG nova.scheduler.client.report [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1076.227217] env[62923]: DEBUG nova.compute.provider_tree [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1076.238314] env[62923]: DEBUG nova.scheduler.client.report [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1076.258465] env[62923]: DEBUG nova.scheduler.client.report [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1076.264580] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]520a7371-e252-5395-454e-c5f252da8342, 'name': SearchDatastore_Task, 'duration_secs': 0.008466} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.265458] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-493b3d12-f04d-4851-9aef-ddd59d055468 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.270675] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1076.270675] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522bb870-954a-cea2-2c2a-e09104209dac" [ 1076.270675] env[62923]: _type = "Task" [ 1076.270675] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.282286] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]522bb870-954a-cea2-2c2a-e09104209dac, 'name': SearchDatastore_Task, 'duration_secs': 0.008962} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.282572] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1076.282882] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] bc86897e-85d6-46ea-bf66-2df7c6ed8fa0/bc86897e-85d6-46ea-bf66-2df7c6ed8fa0.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1076.283186] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc8e12b3-a5b9-408f-aadb-134a1195b5a0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.290429] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1076.290429] env[62923]: value = "task-1370509" [ 1076.290429] env[62923]: _type = "Task" [ 1076.290429] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.299893] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370509, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.376659] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abefea4d-7982-41d3-bbf0-f36374dd4ce7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.384714] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71c2fc8-8621-4cfc-ad73-93b1f8f06f5d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.417188] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd68260-c182-4bc6-b686-dc5084676faf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.424768] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c15288-85e9-4257-acb9-7b3f327b0d21 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.438084] env[62923]: DEBUG nova.compute.provider_tree [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1076.475367] env[62923]: DEBUG oslo_vmware.api [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370507, 'name': PowerOnVM_Task, 'duration_secs': 0.491835} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.475648] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1076.475853] env[62923]: INFO nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Took 9.81 seconds to spawn the instance on the hypervisor. 
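The ERROR/refresh sequence above (req-dcd650f8-7719-42c9-9d5c-3f3d26560e23) is Placement's optimistic concurrency control at work: each resource provider carries a generation, an inventory PUT sent with a stale generation is rejected with 409 "placement.concurrent_update", and the report client then refreshes inventories, aggregates and traits before retrying, which is why the follow-up records show "Refreshing inventories" and finally "Inventory has not changed". A minimal sketch of that generation-checked update follows; it is not Nova's actual report client, and the endpoint URL, microversion header and omitted auth are placeholder assumptions.

    import requests

    PLACEMENT = "http://placement.example/placement"   # assumed endpoint; real auth omitted
    HEADERS = {"OpenStack-API-Version": "placement 1.26"}

    def put_inventory(rp_uuid, inventories):
        # Read the provider's current generation first.
        rp = requests.get(f"{PLACEMENT}/resource_providers/{rp_uuid}", headers=HEADERS)
        rp.raise_for_status()
        body = {"resource_provider_generation": rp.json()["generation"],
                "inventories": inventories}
        resp = requests.put(f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                            headers=HEADERS, json=body)
        if resp.status_code == 409:
            # Stale generation: another writer updated the provider between
            # our GET and PUT (the placement.concurrent_update case above).
            # The caller should refresh its cached view and retry.
            return None
        resp.raise_for_status()
        return resp.json()

Generations let many concurrent writers (schedulers, compute nodes) update a provider without any central lock; a losing writer simply refreshes and retries, exactly as the log shows.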
[ 1076.476047] env[62923]: DEBUG nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1076.476930] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3671c46c-b0ff-4159-989c-1d37303a297c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.585187] env[62923]: DEBUG nova.compute.manager [req-c7632412-3653-4a6f-9bee-38231d769e59 req-9b87199a-7c07-4395-9bc2-f65a73c01ec9 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Received event network-vif-deleted-545dfb40-7ae4-4d69-86f8-0d334ced67ff {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1076.585187] env[62923]: INFO nova.compute.manager [req-c7632412-3653-4a6f-9bee-38231d769e59 req-9b87199a-7c07-4395-9bc2-f65a73c01ec9 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Neutron deleted interface 545dfb40-7ae4-4d69-86f8-0d334ced67ff; detaching it from the instance and deleting it from the info cache [ 1076.585187] env[62923]: DEBUG nova.network.neutron [req-c7632412-3653-4a6f-9bee-38231d769e59 req-9b87199a-7c07-4395-9bc2-f65a73c01ec9 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.800750] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370509, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.941122] env[62923]: DEBUG nova.scheduler.client.report [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1076.999322] env[62923]: INFO nova.compute.manager [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Took 18.64 seconds to build instance. 
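The recurring triples of "Waiting for the task: (returnval){ value = task-NNN ... }", "progress is N%", and "completed successfully" come from oslo.vmware polling a vSphere TaskInfo until it reaches a terminal state (wait_for_task issuing the request, _poll_task checking progress). Below is a generic sketch of that poll-until-terminal loop, with illustrative names, intervals and timeout rather than oslo.vmware's real (eventlet-based) API.

    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        # get_task_info() is assumed to return an object mirroring vSphere
        # TaskInfo, with .state in {"queued", "running", "success", "error"}.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                return info.result          # e.g. the CreateVM_Task result
            if info.state == "error":
                raise RuntimeError(info.error)
            # "queued"/"running": this is the state behind the repeated
            # "progress is N%" records in the log; poll again shortly.
            time.sleep(interval)
        raise TimeoutError("task did not complete within %.0fs" % timeout)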
[ 1077.061036] env[62923]: DEBUG nova.network.neutron [-] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.089270] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9249f944-ec55-4fff-91e5-c6c7461caa53 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.098982] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda37472-4776-45ae-91fa-137ec0df967c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.125182] env[62923]: DEBUG nova.compute.manager [req-c7632412-3653-4a6f-9bee-38231d769e59 req-9b87199a-7c07-4395-9bc2-f65a73c01ec9 service nova] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Detach interface failed, port_id=545dfb40-7ae4-4d69-86f8-0d334ced67ff, reason: Instance 75f9473f-ca67-4bb5-8663-0ce3709885e9 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1077.302047] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370509, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519702} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.302329] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] bc86897e-85d6-46ea-bf66-2df7c6ed8fa0/bc86897e-85d6-46ea-bf66-2df7c6ed8fa0.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1077.302541] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1077.302800] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-017eb88e-bcf2-45d8-829b-e92b3aea9553 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.309856] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1077.309856] env[62923]: value = "task-1370510" [ 1077.309856] env[62923]: _type = "Task" [ 1077.309856] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.317564] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370510, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.446344] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.960s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.446905] env[62923]: DEBUG nova.compute.manager [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1077.501570] env[62923]: DEBUG oslo_concurrency.lockutils [None req-9c3feba6-2caa-4282-a496-198941cab8b6 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "cb87a7b0-5a88-4b58-aea3-ce998cf579b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.144s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.563653] env[62923]: INFO nova.compute.manager [-] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Took 1.50 seconds to deallocate network for instance. [ 1077.821397] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370510, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064381} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.821397] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1077.821397] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbf99ab-1751-4337-90e4-201d6b85870a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.842686] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] bc86897e-85d6-46ea-bf66-2df7c6ed8fa0/bc86897e-85d6-46ea-bf66-2df7c6ed8fa0.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1077.844035] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1eef6a3-964b-42eb-97b9-4fb90404891e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.866112] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1077.866112] env[62923]: value = "task-1370511" [ 1077.866112] env[62923]: _type = "Task" [ 1077.866112] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.876660] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370511, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.939628] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.939807] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Starting heal instance info cache {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1077.953047] env[62923]: DEBUG nova.compute.utils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1077.953482] env[62923]: DEBUG nova.compute.manager [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Allocating IP information in the background. 
{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1077.953651] env[62923]: DEBUG nova.network.neutron [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1078.021452] env[62923]: DEBUG nova.policy [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '676a737149a9418498a55f83760df073', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d1cf5e642524949a8366bf54d00593e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 1078.070993] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.071301] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.071568] env[62923]: DEBUG nova.objects.instance [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lazy-loading 'resources' on Instance uuid 75f9473f-ca67-4bb5-8663-0ce3709885e9 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.306569] env[62923]: DEBUG nova.network.neutron [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Successfully created port: 3b76277b-efa1-43eb-908f-60a7e2a9f7ad {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1078.362170] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "fc9ed87d-147a-47c0-b37e-720f20132b17" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.362449] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "fc9ed87d-147a-47c0-b37e-720f20132b17" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.362675] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "fc9ed87d-147a-47c0-b37e-720f20132b17-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.362861] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "fc9ed87d-147a-47c0-b37e-720f20132b17-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.363046] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "fc9ed87d-147a-47c0-b37e-720f20132b17-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.365178] env[62923]: INFO nova.compute.manager [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Terminating instance [ 1078.369920] env[62923]: DEBUG nova.compute.manager [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1078.370128] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1078.370886] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11e9e05-855f-4553-91c9-315747374c78 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.378959] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370511, 'name': ReconfigVM_Task, 'duration_secs': 0.410323} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.380915] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Reconfigured VM instance instance-0000006c to attach disk [datastore2] bc86897e-85d6-46ea-bf66-2df7c6ed8fa0/bc86897e-85d6-46ea-bf66-2df7c6ed8fa0.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1078.381550] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1078.381788] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f681390-5724-434f-8b5a-ac54f898fbc0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.383345] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eba693ec-2a09-49f9-9c58-88b06469ac05 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.389671] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1078.389671] env[62923]: value = "task-1370512" [ 1078.389671] env[62923]: _type = "Task" [ 1078.389671] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.390745] env[62923]: DEBUG oslo_vmware.api [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1078.390745] env[62923]: value = "task-1370513" [ 1078.390745] env[62923]: _type = "Task" [ 1078.390745] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.403716] env[62923]: DEBUG oslo_vmware.api [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370513, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.406750] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370512, 'name': Rename_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.408231] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "cb87a7b0-5a88-4b58-aea3-ce998cf579b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.408460] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "cb87a7b0-5a88-4b58-aea3-ce998cf579b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.408674] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "cb87a7b0-5a88-4b58-aea3-ce998cf579b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1078.408855] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "cb87a7b0-5a88-4b58-aea3-ce998cf579b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.409104] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "cb87a7b0-5a88-4b58-aea3-ce998cf579b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1078.411439] env[62923]: INFO nova.compute.manager [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Terminating instance [ 1078.413484] env[62923]: DEBUG nova.compute.manager [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1078.413680] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1078.414619] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81fa6669-fcf6-47f7-98b0-4616d65b7ddc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.422757] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1078.423049] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee9f4821-5d58-4dd7-94c6-83074aeb0446 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.429061] env[62923]: DEBUG oslo_vmware.api [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){ [ 1078.429061] env[62923]: value = "task-1370514" [ 1078.429061] env[62923]: _type = "Task" [ 1078.429061] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.436362] env[62923]: DEBUG oslo_vmware.api [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370514, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.450015] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Volume attach. 
Driver type: vmdk {{(pid=62923) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1078.450330] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291543', 'volume_id': '6f0382bb-6899-496a-a452-9cdc00d9a14f', 'name': 'volume-6f0382bb-6899-496a-a452-9cdc00d9a14f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'aae1a2a3-57da-4846-8240-ac0626e9ebd8', 'attached_at': '', 'detached_at': '', 'volume_id': '6f0382bb-6899-496a-a452-9cdc00d9a14f', 'serial': '6f0382bb-6899-496a-a452-9cdc00d9a14f'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1078.451371] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d94215-0285-4d4a-8f38-0922a9e1192e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.456433] env[62923]: DEBUG nova.compute.manager [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1078.472536] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ace704-92d3-4adb-99ec-d3cd9690dc5e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.503169] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] volume-6f0382bb-6899-496a-a452-9cdc00d9a14f/volume-6f0382bb-6899-496a-a452-9cdc00d9a14f.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1078.503990] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-209a2266-16dc-44e9-988a-ceae4e83d5f0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.525365] env[62923]: DEBUG oslo_vmware.api [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1078.525365] env[62923]: value = "task-1370515" [ 1078.525365] env[62923]: _type = "Task" [ 1078.525365] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.535270] env[62923]: DEBUG oslo_vmware.api [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370515, 'name': ReconfigVM_Task} progress is 5%. 
[ 1078.685712] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b424fd3-e483-4278-aec0-41132e8edcd3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1078.692997] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe177f72-60c3-4d81-8a17-0ae0d229eb4b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1078.721324] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbd87bc-4e7b-4c10-97cc-5221a7f733bb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1078.727855] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023bf915-99f8-4d35-b240-eef0aab597b3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1078.740183] env[62923]: DEBUG nova.compute.provider_tree [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1078.902913] env[62923]: DEBUG oslo_vmware.api [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370513, 'name': PowerOffVM_Task, 'duration_secs': 0.186899} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1078.905861] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1078.906021] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1078.906291] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370512, 'name': Rename_Task, 'duration_secs': 0.147864} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1078.907109] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5cf116e0-b2f5-44e6-99f0-b4bb7b74d8a7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1078.908097] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1078.908329] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb540c64-252b-4abc-bb75-2a91ffeaf195 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1078.913975] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){
[ 1078.913975] env[62923]: value = "task-1370517"
[ 1078.913975] env[62923]: _type = "Task"
[ 1078.913975] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1078.925321] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370517, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1078.937579] env[62923]: DEBUG oslo_vmware.api [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370514, 'name': PowerOffVM_Task, 'duration_secs': 0.175687} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1078.937854] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1078.938157] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1078.938421] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f56999d9-669b-4075-9ecb-871be481cdff {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1078.970359] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1078.970599] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1078.970783] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Deleting the datastore file [datastore2] fc9ed87d-147a-47c0-b37e-720f20132b17 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1078.971098] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ba85fe0-37f2-40ab-b5a2-18046d7a7047 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1078.980650] env[62923]: DEBUG oslo_vmware.api [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){
[ 1078.980650] env[62923]: value = "task-1370519"
[ 1078.980650] env[62923]: _type = "Task"
[ 1078.980650] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1078.990524] env[62923]: DEBUG oslo_vmware.api [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370519, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1079.000925] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1079.000925] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1079.001126] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Deleting the datastore file [datastore2] cb87a7b0-5a88-4b58-aea3-ce998cf579b6 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1079.002041] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-caa5e4cb-e730-4730-b8e3-f30600bd0303 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1079.008347] env[62923]: DEBUG oslo_vmware.api [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for the task: (returnval){
[ 1079.008347] env[62923]: value = "task-1370520"
[ 1079.008347] env[62923]: _type = "Task"
[ 1079.008347] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1079.016671] env[62923]: DEBUG oslo_vmware.api [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370520, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1079.034232] env[62923]: DEBUG oslo_vmware.api [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370515, 'name': ReconfigVM_Task, 'duration_secs': 0.397354} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1079.034454] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Reconfigured VM instance instance-00000065 to attach disk [datastore1] volume-6f0382bb-6899-496a-a452-9cdc00d9a14f/volume-6f0382bb-6899-496a-a452-9cdc00d9a14f.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1079.039713] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6f36100-f0a6-4628-be04-ec6d3565db51 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1079.054722] env[62923]: DEBUG oslo_vmware.api [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){
[ 1079.054722] env[62923]: value = "task-1370521"
[ 1079.054722] env[62923]: _type = "Task"
[ 1079.054722] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1079.062599] env[62923]: DEBUG oslo_vmware.api [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370521, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1079.243740] env[62923]: DEBUG nova.scheduler.client.report [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1079.424890] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370517, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1079.478971] env[62923]: DEBUG nova.compute.manager [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 1079.489663] env[62923]: DEBUG oslo_vmware.api [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370519, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208558} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
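The inventory record just reported ties placement capacity to allocation ratios: the schedulable amount of each resource class is total * allocation_ratio - reserved. A quick check against the numbers in the entry above (assuming the standard placement capacity formula):

    # Inventory data as reported for provider a513b783-544c-421b-85ec-cfd6d6ee698d.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = inv["total"] * inv["allocation_ratio"] - inv["reserved"]
        print(f"{rc}: {capacity:g} schedulable")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400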
[ 1079.489911] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1079.490125] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1079.490308] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1079.490479] env[62923]: INFO nova.compute.manager [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Took 1.12 seconds to destroy the instance on the hypervisor.
[ 1079.490717] env[62923]: DEBUG oslo.service.loopingcall [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1079.490907] env[62923]: DEBUG nova.compute.manager [-] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1079.490998] env[62923]: DEBUG nova.network.neutron [-] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1079.503662] env[62923]: DEBUG nova.virt.hardware [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1079.503962] env[62923]: DEBUG nova.virt.hardware [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1079.504191] env[62923]: DEBUG nova.virt.hardware [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1079.504482] env[62923]: DEBUG nova.virt.hardware [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1079.504650] env[62923]: DEBUG nova.virt.hardware [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1079.505056] env[62923]: DEBUG nova.virt.hardware [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1079.505056] env[62923]: DEBUG nova.virt.hardware [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1079.505191] env[62923]: DEBUG nova.virt.hardware [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1079.505359] env[62923]: DEBUG nova.virt.hardware [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1079.505525] env[62923]: DEBUG nova.virt.hardware [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1079.505701] env[62923]: DEBUG nova.virt.hardware [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1079.506522] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7061e5-a5ec-4bae-8563-b0b2568665a3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1079.516333] env[62923]: DEBUG oslo_vmware.api [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Task: {'id': task-1370520, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197043} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1079.518277] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1079.518473] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1079.518654] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1079.518847] env[62923]: INFO nova.compute.manager [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Took 1.11 seconds to destroy the instance on the hypervisor.
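The hardware.py entries above walk the CPU-topology search: with no flavor or image limits set, every factorization sockets x cores x threads = vcpus within the 65536 maxima is a candidate, which for a 1-vCPU flavor leaves exactly the single 1:1:1 topology reported. A simplified sketch of that enumeration (illustrative only, not Nova's actual implementation):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product is vcpus."""
        for sockets, cores in product(range(1, min(vcpus, max_sockets) + 1),
                                      range(1, min(vcpus, max_cores) + 1)):
            if vcpus % (sockets * cores) == 0:
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    yield (sockets, cores, threads)

    print(list(possible_topologies(1)))   # [(1, 1, 1)], matching the log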
[ 1079.519108] env[62923]: DEBUG oslo.service.loopingcall [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1079.519351] env[62923]: DEBUG nova.compute.manager [-] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1079.519445] env[62923]: DEBUG nova.network.neutron [-] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1079.521864] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1915347a-a77b-4f60-97b7-ce5351a57d43 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1079.564771] env[62923]: DEBUG oslo_vmware.api [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370521, 'name': ReconfigVM_Task, 'duration_secs': 0.135548} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1079.565079] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291543', 'volume_id': '6f0382bb-6899-496a-a452-9cdc00d9a14f', 'name': 'volume-6f0382bb-6899-496a-a452-9cdc00d9a14f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'aae1a2a3-57da-4846-8240-ac0626e9ebd8', 'attached_at': '', 'detached_at': '', 'volume_id': '6f0382bb-6899-496a-a452-9cdc00d9a14f', 'serial': '6f0382bb-6899-496a-a452-9cdc00d9a14f'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}}
[ 1079.713366] env[62923]: DEBUG nova.compute.manager [req-7536de53-1446-4f3a-b224-fd151cc12696 req-0f0e768c-1b0b-4446-bfed-f7ce26435f6b service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Received event network-vif-plugged-3b76277b-efa1-43eb-908f-60a7e2a9f7ad {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1079.713366] env[62923]: DEBUG oslo_concurrency.lockutils [req-7536de53-1446-4f3a-b224-fd151cc12696 req-0f0e768c-1b0b-4446-bfed-f7ce26435f6b service nova] Acquiring lock "b826c4d1-3e31-49da-8e16-8e512599912c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1079.713475] env[62923]: DEBUG oslo_concurrency.lockutils [req-7536de53-1446-4f3a-b224-fd151cc12696 req-0f0e768c-1b0b-4446-bfed-f7ce26435f6b service nova] Lock "b826c4d1-3e31-49da-8e16-8e512599912c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1079.713648] env[62923]: DEBUG oslo_concurrency.lockutils [req-7536de53-1446-4f3a-b224-fd151cc12696 req-0f0e768c-1b0b-4446-bfed-f7ce26435f6b service nova] Lock "b826c4d1-3e31-49da-8e16-8e512599912c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1079.713822] env[62923]: DEBUG nova.compute.manager [req-7536de53-1446-4f3a-b224-fd151cc12696 req-0f0e768c-1b0b-4446-bfed-f7ce26435f6b service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] No waiting events found dispatching network-vif-plugged-3b76277b-efa1-43eb-908f-60a7e2a9f7ad {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1079.713986] env[62923]: WARNING nova.compute.manager [req-7536de53-1446-4f3a-b224-fd151cc12696 req-0f0e768c-1b0b-4446-bfed-f7ce26435f6b service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Received unexpected event network-vif-plugged-3b76277b-efa1-43eb-908f-60a7e2a9f7ad for instance with vm_state building and task_state spawning.
[ 1079.748611] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.677s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1079.778210] env[62923]: INFO nova.scheduler.client.report [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleted allocations for instance 75f9473f-ca67-4bb5-8663-0ce3709885e9
[ 1079.796898] env[62923]: DEBUG nova.network.neutron [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Successfully updated port: 3b76277b-efa1-43eb-908f-60a7e2a9f7ad {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1079.908680] env[62923]: DEBUG nova.compute.manager [req-c79a78fc-87b6-4e67-8004-a5e04d458a3d req-444c3167-362e-44bc-9885-aea99bc72e12 service nova] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Received event network-vif-deleted-e0cf2509-917a-467c-a0f2-4bcc7abb8bbf {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1079.908927] env[62923]: INFO nova.compute.manager [req-c79a78fc-87b6-4e67-8004-a5e04d458a3d req-444c3167-362e-44bc-9885-aea99bc72e12 service nova] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Neutron deleted interface e0cf2509-917a-467c-a0f2-4bcc7abb8bbf; detaching it from the instance and deleting it from the info cache
[ 1079.909052] env[62923]: DEBUG nova.network.neutron [req-c79a78fc-87b6-4e67-8004-a5e04d458a3d req-444c3167-362e-44bc-9885-aea99bc72e12 service nova] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1079.936291] env[62923]: DEBUG oslo_vmware.api [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370517, 'name': PowerOnVM_Task, 'duration_secs': 0.52452} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
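The "-events" lock, pop_instance_event, and the "Received unexpected event" warning above trace Nova's external-event handshake: the compute manager registers an expected event name before an operation, and a Neutron notification either completes a waiting event or, as here, arrives with nobody waiting and is dropped with a warning. A minimal sketch of that dispatch pattern using a dict of threading.Event objects (hypothetical names, not the nova.compute.manager code):

    import threading

    class InstanceEvents:
        """Map 'network-vif-plugged-<port>' style names to waiters."""

        def __init__(self):
            self._lock = threading.Lock()
            self._events = {}   # event name -> threading.Event

        def prepare(self, name):
            # Called before the operation that will trigger the event.
            with self._lock:
                ev = threading.Event()
                self._events[name] = ev
                return ev

        def pop_and_signal(self, name):
            # Called from the incoming-notification path.
            with self._lock:
                ev = self._events.pop(name, None)
            if ev is None:
                # No waiting events found: log and drop, as in the
                # WARNING entry above.
                print(f"Received unexpected event {name}")
            else:
                ev.set()

Under this sketch, a spawn path would call prepare() before plugging the VIF and then wait() on the returned event; the event in the log arrived while the instance was still building, before any waiter had been registered.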
[ 1079.936525] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1079.936750] env[62923]: INFO nova.compute.manager [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Took 8.32 seconds to spawn the instance on the hypervisor.
[ 1079.936942] env[62923]: DEBUG nova.compute.manager [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1079.937776] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d69e13-95b5-4813-8950-1ab1e8efb5ea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1079.967291] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1079.967291] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquired lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1079.967427] env[62923]: DEBUG nova.network.neutron [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Forcefully refreshing network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}}
[ 1080.285861] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bf7c356c-4e3b-4159-8f38-7204f6330e95 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "75f9473f-ca67-4bb5-8663-0ce3709885e9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.337s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1080.299582] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "refresh_cache-b826c4d1-3e31-49da-8e16-8e512599912c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1080.299730] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "refresh_cache-b826c4d1-3e31-49da-8e16-8e512599912c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1080.299896] env[62923]: DEBUG nova.network.neutron [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1080.384269] env[62923]: DEBUG nova.network.neutron [-] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1080.388805] env[62923]: DEBUG nova.network.neutron [-] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1080.412416] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-179d6f02-7afc-44b6-97af-5f889438e4ea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1080.422806] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da942baf-5dec-4901-9735-fb143f295723 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1080.455362] env[62923]: DEBUG nova.compute.manager [req-c79a78fc-87b6-4e67-8004-a5e04d458a3d req-444c3167-362e-44bc-9885-aea99bc72e12 service nova] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Detach interface failed, port_id=e0cf2509-917a-467c-a0f2-4bcc7abb8bbf, reason: Instance fc9ed87d-147a-47c0-b37e-720f20132b17 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 1080.458906] env[62923]: INFO nova.compute.manager [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Took 19.62 seconds to build instance.
[ 1080.469746] env[62923]: DEBUG nova.compute.utils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Can not refresh info_cache because instance was not found {{(pid=62923) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}}
[ 1080.489943] env[62923]: DEBUG nova.network.neutron [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1080.604249] env[62923]: DEBUG nova.objects.instance [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lazy-loading 'flavor' on Instance uuid aae1a2a3-57da-4846-8240-ac0626e9ebd8 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1080.832797] env[62923]: DEBUG nova.network.neutron [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 1080.887230] env[62923]: INFO nova.compute.manager [-] [instance: fc9ed87d-147a-47c0-b37e-720f20132b17] Took 1.40 seconds to deallocate network for instance.
[ 1080.891742] env[62923]: INFO nova.compute.manager [-] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Took 1.37 seconds to deallocate network for instance.
[ 1080.961410] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d8716dd9-8a87-4855-8903-dcbcb4c733bd tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.129s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1080.970316] env[62923]: DEBUG nova.network.neutron [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Updating instance_info_cache with network_info: [{"id": "3b76277b-efa1-43eb-908f-60a7e2a9f7ad", "address": "fa:16:3e:8c:6b:5d", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b76277b-ef", "ovs_interfaceid": "3b76277b-efa1-43eb-908f-60a7e2a9f7ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1081.072685] env[62923]: DEBUG nova.network.neutron [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1081.109485] env[62923]: DEBUG oslo_concurrency.lockutils [None req-80652610-01af-410d-87af-b0ae107ff72b tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.261s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1081.325844] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
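The network_info blob cached above is a list of VIF dicts; everything the driver later needs (MAC, bridge, fixed IPs, the NSX switch id) is nested inside it. A short sketch pulling the fixed addresses out of a structure shaped like the one in the log, abbreviated to just the fields used here:

    network_info = [{
        "id": "3b76277b-efa1-43eb-908f-60a7e2a9f7ad",
        "address": "fa:16:3e:8c:6b:5d",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.4", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], fixed)
    # 3b76277b-... fa:16:3e:8c:6b:5d ['192.168.128.4']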
"nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.326170] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.398358] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.399641] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.399867] env[62923]: DEBUG nova.objects.instance [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lazy-loading 'resources' on Instance uuid fc9ed87d-147a-47c0-b37e-720f20132b17 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.406611] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.473022] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "refresh_cache-b826c4d1-3e31-49da-8e16-8e512599912c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.473363] env[62923]: DEBUG nova.compute.manager [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Instance network_info: |[{"id": "3b76277b-efa1-43eb-908f-60a7e2a9f7ad", "address": "fa:16:3e:8c:6b:5d", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b76277b-ef", "ovs_interfaceid": "3b76277b-efa1-43eb-908f-60a7e2a9f7ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1081.473878] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:6b:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e41070eb-3ac1-4ca9-a3d0-fd65893a97de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b76277b-efa1-43eb-908f-60a7e2a9f7ad', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1081.482436] env[62923]: DEBUG oslo.service.loopingcall [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1081.483778] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1081.484112] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4dd91355-0caf-4688-8f3a-c0cb73713149 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.517447] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1081.517447] env[62923]: value = "task-1370522" [ 1081.517447] env[62923]: _type = "Task" [ 1081.517447] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.525446] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370522, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.574637] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Releasing lock "refresh_cache-75f9473f-ca67-4bb5-8663-0ce3709885e9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.574865] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Updated the network info_cache for instance {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1081.575122] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1081.575308] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1081.575432] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62923) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1081.575586] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1081.575716] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Cleaning up deleted instances {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1081.741850] env[62923]: DEBUG nova.compute.manager [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Received event network-changed-3b76277b-efa1-43eb-908f-60a7e2a9f7ad {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1081.742186] env[62923]: DEBUG nova.compute.manager [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Refreshing instance network info cache due to event network-changed-3b76277b-efa1-43eb-908f-60a7e2a9f7ad. 
[ 1081.742576] env[62923]: DEBUG oslo_concurrency.lockutils [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] Acquiring lock "refresh_cache-b826c4d1-3e31-49da-8e16-8e512599912c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1081.742881] env[62923]: DEBUG oslo_concurrency.lockutils [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] Acquired lock "refresh_cache-b826c4d1-3e31-49da-8e16-8e512599912c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1081.743182] env[62923]: DEBUG nova.network.neutron [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Refreshing network info cache for port 3b76277b-efa1-43eb-908f-60a7e2a9f7ad {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1081.829682] env[62923]: INFO nova.compute.manager [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Detaching volume 6f0382bb-6899-496a-a452-9cdc00d9a14f
[ 1081.870472] env[62923]: INFO nova.virt.block_device [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Attempting to driver detach volume 6f0382bb-6899-496a-a452-9cdc00d9a14f from mountpoint /dev/sdb
[ 1081.870714] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Volume detach. Driver type: vmdk {{(pid=62923) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}}
[ 1081.870904] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291543', 'volume_id': '6f0382bb-6899-496a-a452-9cdc00d9a14f', 'name': 'volume-6f0382bb-6899-496a-a452-9cdc00d9a14f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'aae1a2a3-57da-4846-8240-ac0626e9ebd8', 'attached_at': '', 'detached_at': '', 'volume_id': '6f0382bb-6899-496a-a452-9cdc00d9a14f', 'serial': '6f0382bb-6899-496a-a452-9cdc00d9a14f'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}}
[ 1081.871815] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c70f7b-d019-459b-95d7-d223a262babc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1081.901701] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a13074-f77e-425c-9537-49d9db4fa2b9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1081.916559] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c637422-1771-40ee-a67e-87958d6c60ed {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1081.946972] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133a1076-b314-40a7-968b-9a2bfa7b1824 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1081.951269] env[62923]: DEBUG nova.compute.manager [req-c8d453d7-0ca0-4970-9004-1935a4447f89 req-592f4620-4bd4-48d5-8f3c-4f070f5ff0bd service nova] [instance: cb87a7b0-5a88-4b58-aea3-ce998cf579b6] Received event network-vif-deleted-a84ec101-c21e-4c0b-b736-a05c336817e9 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1081.967498] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] The volume has not been displaced from its original location: [datastore1] volume-6f0382bb-6899-496a-a452-9cdc00d9a14f/volume-6f0382bb-6899-496a-a452-9cdc00d9a14f.vmdk. No consolidation needed. {{(pid=62923) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}}
[ 1081.976070] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}}
[ 1081.980303] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40e426b6-8dd8-40c9-abe7-7040b48159eb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1082.008483] env[62923]: DEBUG oslo_vmware.api [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){
[ 1082.008483] env[62923]: value = "task-1370523"
[ 1082.008483] env[62923]: _type = "Task"
[ 1082.008483] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1082.021148] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1082.021394] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1082.029417] env[62923]: DEBUG oslo_vmware.api [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370523, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1082.035259] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370522, 'name': CreateVM_Task, 'duration_secs': 0.340591} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1082.037677] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 1082.038714] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1082.038879] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1082.039217] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1082.039478] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3717141f-f32d-4e29-bee8-0152dc4ea9bb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1082.043969] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1082.043969] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d45f60-78c6-dc4f-7345-9b415d757102"
[ 1082.043969] env[62923]: _type = "Task"
[ 1082.043969] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1082.052897] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d45f60-78c6-dc4f-7345-9b415d757102, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1082.087775] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] There are 41 instances to clean {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}}
[ 1082.088135] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 996cb68a-4a18-488d-890f-ace24dcd4c42] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1082.105765] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d15c58c-2b4b-4576-99c1-7b4394c7e909 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1082.115063] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cbce0b6-5900-45f5-8a0a-eb669d737a77 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1082.148154] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bba38ba-6199-4c86-b2bf-449ba27383e2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1082.156711] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e546e19c-7316-434a-807b-6cb7af3d80c9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1082.177530] env[62923]: DEBUG nova.compute.provider_tree [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1082.462727] env[62923]: DEBUG nova.network.neutron [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Updated VIF entry in instance network info cache for port 3b76277b-efa1-43eb-908f-60a7e2a9f7ad. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 1082.463144] env[62923]: DEBUG nova.network.neutron [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Updating instance_info_cache with network_info: [{"id": "3b76277b-efa1-43eb-908f-60a7e2a9f7ad", "address": "fa:16:3e:8c:6b:5d", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b76277b-ef", "ovs_interfaceid": "3b76277b-efa1-43eb-908f-60a7e2a9f7ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1082.518191] env[62923]: DEBUG oslo_vmware.api [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370523, 'name': ReconfigVM_Task, 'duration_secs': 0.229967} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1082.518475] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}}
[ 1082.523084] env[62923]: DEBUG nova.compute.manager [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 1082.525695] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a90f33e3-048a-4994-84aa-ff17d9bd57cc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1082.539954] env[62923]: DEBUG oslo_vmware.api [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){
[ 1082.539954] env[62923]: value = "task-1370524"
[ 1082.539954] env[62923]: _type = "Task"
[ 1082.539954] env[62923]: } to complete.
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1082.550300] env[62923]: DEBUG oslo_vmware.api [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370524, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1082.556903] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52d45f60-78c6-dc4f-7345-9b415d757102, 'name': SearchDatastore_Task, 'duration_secs': 0.009909} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1082.557267] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1082.558364] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1082.558364] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1082.558364] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1082.558364] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1082.558364] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef620958-f941-43e0-bf2e-5ae75e51cd74 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1082.566904] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1082.567123] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 1082.567865] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe7a8d18-efa5-48b5-a507-8fe2a066b324 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1082.573078] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1082.573078] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]520d8566-e769-3aec-c7aa-b338defa62e0"
[ 1082.573078] env[62923]: _type = "Task"
[ 1082.573078] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1082.580443] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]520d8566-e769-3aec-c7aa-b338defa62e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1082.591188] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 19e75201-8918-4b27-928b-633849222daf] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1082.681588] env[62923]: DEBUG nova.scheduler.client.report [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1082.965954] env[62923]: DEBUG oslo_concurrency.lockutils [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] Releasing lock "refresh_cache-b826c4d1-3e31-49da-8e16-8e512599912c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1082.966339] env[62923]: DEBUG nova.compute.manager [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Received event network-changed-dea689f8-5e91-490e-980b-8025533b5e90 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1082.966574] env[62923]: DEBUG nova.compute.manager [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Refreshing instance network info cache due to event network-changed-dea689f8-5e91-490e-980b-8025533b5e90. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 1082.966808] env[62923]: DEBUG oslo_concurrency.lockutils [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] Acquiring lock "refresh_cache-bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1082.966969] env[62923]: DEBUG oslo_concurrency.lockutils [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] Acquired lock "refresh_cache-bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1082.967149] env[62923]: DEBUG nova.network.neutron [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Refreshing network info cache for port dea689f8-5e91-490e-980b-8025533b5e90 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 1083.050208] env[62923]: DEBUG oslo_vmware.api [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370524, 'name': ReconfigVM_Task, 'duration_secs': 0.12808} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1083.050519] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291543', 'volume_id': '6f0382bb-6899-496a-a452-9cdc00d9a14f', 'name': 'volume-6f0382bb-6899-496a-a452-9cdc00d9a14f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'aae1a2a3-57da-4846-8240-ac0626e9ebd8', 'attached_at': '', 'detached_at': '', 'volume_id': '6f0382bb-6899-496a-a452-9cdc00d9a14f', 'serial': '6f0382bb-6899-496a-a452-9cdc00d9a14f'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}}
[ 1083.083069] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]520d8566-e769-3aec-c7aa-b338defa62e0, 'name': SearchDatastore_Task, 'duration_secs': 0.013493} completed successfully.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1083.083982] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55f51aa8-903c-428c-9885-4c2356f808f7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1083.088841] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1083.088841] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52378a9b-052a-e001-d08a-900b6221672c"
[ 1083.088841] env[62923]: _type = "Task"
[ 1083.088841] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1083.096543] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 62889af3-06e9-4f5e-9ab0-87024e0678ca] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1083.098377] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52378a9b-052a-e001-d08a-900b6221672c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1083.187015] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.787s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1083.189959] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1083.190989] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.786s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1083.190989] env[62923]: DEBUG nova.objects.instance [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lazy-loading 'resources' on Instance uuid cb87a7b0-5a88-4b58-aea3-ce998cf579b6 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1083.209534] env[62923]: INFO nova.scheduler.client.report [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Deleted allocations for instance fc9ed87d-147a-47c0-b37e-720f20132b17
[ 1083.600878] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52378a9b-052a-e001-d08a-900b6221672c, 'name': SearchDatastore_Task, 'duration_secs': 0.011269} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1083.602108] env[62923]: DEBUG nova.objects.instance [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lazy-loading 'flavor' on Instance uuid aae1a2a3-57da-4846-8240-ac0626e9ebd8 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1083.603400] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1083.603665] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] b826c4d1-3e31-49da-8e16-8e512599912c/b826c4d1-3e31-49da-8e16-8e512599912c.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 1083.604542] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: d1026124-821b-44c1-b1f6-257597ce1195] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1083.605783] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c59dfd8-dc0c-4e78-9bbd-4457a9e53f38 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1083.612459] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1083.612459] env[62923]: value = "task-1370525"
[ 1083.612459] env[62923]: _type = "Task"
[ 1083.612459] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1083.621014] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370525, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1083.685862] env[62923]: DEBUG nova.network.neutron [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Updated VIF entry in instance network info cache for port dea689f8-5e91-490e-980b-8025533b5e90. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 1083.686248] env[62923]: DEBUG nova.network.neutron [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Updating instance_info_cache with network_info: [{"id": "dea689f8-5e91-490e-980b-8025533b5e90", "address": "fa:16:3e:22:16:a6", "network": {"id": "f9845a8d-f3e3-4080-8b11-bca02678b9c5", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1137339094-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a855374ba4624ee78230d07b85b2ab8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1d25020-c621-4388-ac1d-de55bfefbe50", "external-id": "nsx-vlan-transportzone-573", "segmentation_id": 573, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdea689f8-5e", "ovs_interfaceid": "dea689f8-5e91-490e-980b-8025533b5e90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1083.717400] env[62923]: DEBUG oslo_concurrency.lockutils [None req-88a8d65d-49e1-4caf-a9d9-793d9ecff660 tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "fc9ed87d-147a-47c0-b37e-720f20132b17" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.355s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1083.795471] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804f2b15-2366-485d-ab3d-bc3214ef08bb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1083.803064] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9b95aa-21ec-4b84-8e46-48216548f44e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1083.833727] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245e921c-957c-4498-98ad-ab4d59bcafbb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1083.841480] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc31567-3d7a-4f0e-a6cc-02453ee9c252 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1083.855159] env[62923]: DEBUG nova.compute.provider_tree [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d
{{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1084.108819] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 63b16034-87f0-433f-b48c-0e936642534c] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1084.122425] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370525, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1084.189120] env[62923]: DEBUG oslo_concurrency.lockutils [req-924cea38-ee4d-49cc-8e29-7ba2a265772f req-24ee4179-3cbf-4f13-9042-bbb595bb1286 service nova] Releasing lock "refresh_cache-bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1084.358421] env[62923]: DEBUG nova.scheduler.client.report [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1084.613159] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: cb87f39c-38eb-4f8e-9ce2-ad5abdee3aa0] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1084.615716] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6e4690e4-609a-4639-b28a-637a6c0a7770 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.290s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1084.625280] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370525, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581137} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1084.625576] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] b826c4d1-3e31-49da-8e16-8e512599912c/b826c4d1-3e31-49da-8e16-8e512599912c.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 1084.625742] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 1084.626510] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-50dac4e4-38fb-438b-be40-e778a6f6ceed {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1084.633732] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1084.633732] env[62923]: value = "task-1370526"
[ 1084.633732] env[62923]: _type = "Task"
[ 1084.633732] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1084.642230] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370526, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1084.863411] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.673s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1084.865643] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.676s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1084.867433] env[62923]: INFO nova.compute.claims [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1084.879900] env[62923]: INFO nova.scheduler.client.report [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Deleted allocations for instance cb87a7b0-5a88-4b58-aea3-ce998cf579b6
[ 1085.118016] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: b459a438-c287-4fbd-80f5-b5d3c31b83c9] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1085.145086] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370526, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.376308} completed successfully.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1085.145330] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 1085.146082] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd49647-8543-4f9d-80dc-9b0cc86ae9f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1085.170835] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] b826c4d1-3e31-49da-8e16-8e512599912c/b826c4d1-3e31-49da-8e16-8e512599912c.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1085.171860] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78c37a13-9a55-4b22-92f0-6d99ff353688 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1085.192382] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1085.192382] env[62923]: value = "task-1370527"
[ 1085.192382] env[62923]: _type = "Task"
[ 1085.192382] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1085.200140] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370527, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1085.387503] env[62923]: DEBUG oslo_concurrency.lockutils [None req-15cfd917-0b5c-4f18-b331-c6304db61b7a tempest-MultipleCreateTestJSON-1885654464 tempest-MultipleCreateTestJSON-1885654464-project-member] Lock "cb87a7b0-5a88-4b58-aea3-ce998cf579b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.979s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1085.564373] env[62923]: DEBUG oslo_concurrency.lockutils [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1085.564516] env[62923]: DEBUG oslo_concurrency.lockutils [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1085.564695] env[62923]: DEBUG oslo_concurrency.lockutils [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1085.564908] env[62923]: DEBUG oslo_concurrency.lockutils [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1085.565097] env[62923]: DEBUG oslo_concurrency.lockutils [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1085.567252] env[62923]: INFO nova.compute.manager [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Terminating instance
[ 1085.569068] env[62923]: DEBUG nova.compute.manager [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 1085.569267] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1085.570099] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3bd4c7-2138-471f-92e5-570b10539fbc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1085.578115] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1085.578353] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bdaca689-a0c0-4561-96bb-546478cd2b44 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1085.584157] env[62923]: DEBUG oslo_vmware.api [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){
[ 1085.584157] env[62923]: value = "task-1370528"
[ 1085.584157] env[62923]: _type = "Task"
[ 1085.584157] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1085.591715] env[62923]: DEBUG oslo_vmware.api [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370528, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1085.622043] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 9f0b13d8-eb25-474c-b9bb-80ee9dd4955f] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1085.702352] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370527, 'name': ReconfigVM_Task} progress is 14%.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1085.962858] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1cf92cc-17ef-464a-af03-ab147dc90f46 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1085.973026] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e104296-65ec-426d-b5be-f19c569ef0b4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1086.001826] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3562674-e927-4d29-ab16-16bfd4267d94 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1086.009107] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99e8204-0773-4052-8d22-12bdc1bb0617 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1086.022030] env[62923]: DEBUG nova.compute.provider_tree [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1086.094031] env[62923]: DEBUG oslo_vmware.api [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370528, 'name': PowerOffVM_Task, 'duration_secs': 0.21528} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1086.094568] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1086.094744] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1086.094991] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31c17e91-9ea3-4a85-a9dc-7f92de459c53 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1086.125146] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 1b155391-37d9-4186-b70d-84f2dec5af82] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1086.151703] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1086.151928] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1086.152126] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Deleting the datastore file [datastore1] aae1a2a3-57da-4846-8240-ac0626e9ebd8 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1086.152395] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d29b6cf-39be-43ef-a1bf-fab301687724 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1086.159166] env[62923]: DEBUG oslo_vmware.api [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){
[ 1086.159166] env[62923]: value = "task-1370530"
[ 1086.159166] env[62923]: _type = "Task"
[ 1086.159166] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1086.167321] env[62923]: DEBUG oslo_vmware.api [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370530, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1086.203219] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370527, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1086.527030] env[62923]: DEBUG nova.scheduler.client.report [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1086.629214] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 92a10f0a-4bfd-405a-956e-3ea29a740b28] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1086.671324] env[62923]: DEBUG oslo_vmware.api [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370530, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149548} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1086.671743] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1086.672069] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1086.672367] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1086.672669] env[62923]: INFO nova.compute.manager [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Took 1.10 seconds to destroy the instance on the hypervisor.
[ 1086.673034] env[62923]: DEBUG oslo.service.loopingcall [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1086.673349] env[62923]: DEBUG nova.compute.manager [-] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1086.673543] env[62923]: DEBUG nova.network.neutron [-] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1086.705155] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370527, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1087.032527] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.167s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1087.033106] env[62923]: DEBUG nova.compute.manager [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 1087.116595] env[62923]: DEBUG nova.compute.manager [req-ae449716-847c-4767-a1f2-99ca219f45c1 req-79ce864d-6bf5-4a33-82ec-1ccc95d3df09 service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Received event network-vif-deleted-bb561b4b-5c6a-4cc2-b404-07800286d632 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1087.116812] env[62923]: INFO nova.compute.manager [req-ae449716-847c-4767-a1f2-99ca219f45c1 req-79ce864d-6bf5-4a33-82ec-1ccc95d3df09 service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Neutron deleted interface bb561b4b-5c6a-4cc2-b404-07800286d632; detaching it from the instance and deleting it from the info cache
[ 1087.116983] env[62923]: DEBUG nova.network.neutron [req-ae449716-847c-4767-a1f2-99ca219f45c1 req-79ce864d-6bf5-4a33-82ec-1ccc95d3df09 service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1087.135124] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 906da59a-24ac-4486-a835-62d3f81d3683] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1087.203827] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370527, 'name': ReconfigVM_Task, 'duration_secs': 1.859253} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1087.204609] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Reconfigured VM instance instance-0000006d to attach disk [datastore1] b826c4d1-3e31-49da-8e16-8e512599912c/b826c4d1-3e31-49da-8e16-8e512599912c.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1087.204811] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6e3bad3-881c-45fb-afd0-e4fc6b7c74bf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1087.210910] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1087.210910] env[62923]: value = "task-1370531"
[ 1087.210910] env[62923]: _type = "Task"
[ 1087.210910] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1087.224388] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370531, 'name': Rename_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1087.541013] env[62923]: DEBUG nova.compute.utils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1087.542455] env[62923]: DEBUG nova.compute.manager [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 1087.542623] env[62923]: DEBUG nova.network.neutron [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 1087.582541] env[62923]: DEBUG nova.policy [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6bd260135a8e4b96b52e6aad41ff4e42', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '418b805157a74173b5cfe13ea5b61c13', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}}
[ 1087.592030] env[62923]: DEBUG nova.network.neutron [-] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1087.619767] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae79862f-630f-41d2-9cc2-ae3369d81b4b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1087.629262] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2f00b1-7e3a-47ef-bceb-643af0c9589a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1087.640055] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 2d7bceb2-450c-4747-bedb-aa9848450ca9] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}}
[ 1087.657234] env[62923]: DEBUG nova.compute.manager [req-ae449716-847c-4767-a1f2-99ca219f45c1 req-79ce864d-6bf5-4a33-82ec-1ccc95d3df09 service nova] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Detach interface failed, port_id=bb561b4b-5c6a-4cc2-b404-07800286d632, reason: Instance aae1a2a3-57da-4846-8240-ac0626e9ebd8 could not be found.
{{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1087.720944] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370531, 'name': Rename_Task, 'duration_secs': 0.230728} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.721234] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1087.721483] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00c62598-d4bf-4ad0-918b-7744dbc97575 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.727768] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1087.727768] env[62923]: value = "task-1370532" [ 1087.727768] env[62923]: _type = "Task" [ 1087.727768] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.735177] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370532, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.844820] env[62923]: DEBUG nova.network.neutron [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Successfully created port: 9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1088.045491] env[62923]: DEBUG nova.compute.manager [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1088.094238] env[62923]: INFO nova.compute.manager [-] [instance: aae1a2a3-57da-4846-8240-ac0626e9ebd8] Took 1.42 seconds to deallocate network for instance. [ 1088.143391] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 4de1c06d-3261-4447-b5bc-a21a91f7a812] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1088.237707] env[62923]: DEBUG oslo_vmware.api [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370532, 'name': PowerOnVM_Task, 'duration_secs': 0.465825} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.238120] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1088.238355] env[62923]: INFO nova.compute.manager [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Took 8.76 seconds to spawn the instance on the hypervisor. [ 1088.238536] env[62923]: DEBUG nova.compute.manager [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1088.239293] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68649333-05c4-42c4-b891-56872118f0dd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.601611] env[62923]: DEBUG oslo_concurrency.lockutils [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.601857] env[62923]: DEBUG oslo_concurrency.lockutils [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.602113] env[62923]: DEBUG nova.objects.instance [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lazy-loading 'resources' on Instance uuid aae1a2a3-57da-4846-8240-ac0626e9ebd8 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.646826] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 2a9a93f8-9398-4a19-a149-a1092ceb416d] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1088.761672] env[62923]: INFO nova.compute.manager [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Took 14.30 seconds to build instance. 
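The records above show the tail of a VMware spawn: ReconfigVM_Task attaches the root disk, Rename_Task gives the VM its instance name, and PowerOnVM_Task boots it, each polled via wait_for_task until the "progress is N%" lines give way to "completed successfully". A minimal sketch of that invoke-and-poll pattern against the public oslo.vmware API follows; the endpoint, credentials, and vm_ref are placeholders, not values taken from this log.

```python
# Sketch of the invoke-and-poll pattern visible in the records above, using
# the public oslo.vmware API. Endpoint and credentials are placeholders.
from oslo_vmware import api


def power_on(session, vm_ref):
    # *_Task methods return a task moref immediately; wait_for_task() then
    # polls it (the recurring "progress is N%" lines) and raises if the
    # task ends in error, otherwise returns the completed task info.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)


# create_session=True (the default) logs in to vCenter on construction;
# the poll interval / retry count here mirror common defaults, not values
# read from this log.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
```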
[ 1088.844934] env[62923]: DEBUG nova.compute.manager [req-757d6535-580c-43aa-8bc0-ee2a7c218046 req-b3fc43cd-cf30-4537-ba66-d1692528ee8a service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Received event network-changed-3b76277b-efa1-43eb-908f-60a7e2a9f7ad {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1088.845299] env[62923]: DEBUG nova.compute.manager [req-757d6535-580c-43aa-8bc0-ee2a7c218046 req-b3fc43cd-cf30-4537-ba66-d1692528ee8a service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Refreshing instance network info cache due to event network-changed-3b76277b-efa1-43eb-908f-60a7e2a9f7ad. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1088.845615] env[62923]: DEBUG oslo_concurrency.lockutils [req-757d6535-580c-43aa-8bc0-ee2a7c218046 req-b3fc43cd-cf30-4537-ba66-d1692528ee8a service nova] Acquiring lock "refresh_cache-b826c4d1-3e31-49da-8e16-8e512599912c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.845876] env[62923]: DEBUG oslo_concurrency.lockutils [req-757d6535-580c-43aa-8bc0-ee2a7c218046 req-b3fc43cd-cf30-4537-ba66-d1692528ee8a service nova] Acquired lock "refresh_cache-b826c4d1-3e31-49da-8e16-8e512599912c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.846159] env[62923]: DEBUG nova.network.neutron [req-757d6535-580c-43aa-8bc0-ee2a7c218046 req-b3fc43cd-cf30-4537-ba66-d1692528ee8a service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Refreshing network info cache for port 3b76277b-efa1-43eb-908f-60a7e2a9f7ad {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1089.056291] env[62923]: DEBUG nova.compute.manager [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1089.082494] env[62923]: DEBUG nova.virt.hardware [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1089.082735] env[62923]: DEBUG nova.virt.hardware [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1089.082901] env[62923]: DEBUG nova.virt.hardware [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1089.083188] env[62923]: DEBUG nova.virt.hardware [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1089.083244] env[62923]: DEBUG nova.virt.hardware [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1089.083382] env[62923]: DEBUG nova.virt.hardware [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1089.083586] env[62923]: DEBUG nova.virt.hardware [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1089.083747] env[62923]: DEBUG nova.virt.hardware [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
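The nova.virt.hardware records above and just below enumerate CPU topologies: with no flavor or image limits the maxima default to 65536 sockets/cores/threads, and for a 1-vCPU flavor the only sockets/cores/threads triple whose product equals the vCPU count is 1:1:1, hence the single VirtCPUTopology(cores=1,sockets=1,threads=1) result. A simplified, hypothetical re-creation of that enumeration (not Nova's actual implementation):

```python
# Simplified, hypothetical re-creation of the topology enumeration logged
# here: yield every (sockets, cores, threads) triple whose product equals
# the vCPU count, clamped by the (defaulted) maxima.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)


print(list(possible_topologies(1)))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```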
[ 1089.083912] env[62923]: DEBUG nova.virt.hardware [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1089.084092] env[62923]: DEBUG nova.virt.hardware [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1089.084331] env[62923]: DEBUG nova.virt.hardware [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1089.085294] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e8cf9a-c421-4fa1-90c8-76dfae486d14 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.093187] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2bcc7ea-2836-4296-9c50-b822f6935dcd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.150870] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: eaa654f9-023d-4514-930d-6bebd421325a] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1089.205613] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3dddb5-2150-4013-8c13-2c8585ded993 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.215124] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6d724e-0d04-4cd0-afca-6b7b643493d3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.251427] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1edeecf9-cd24-4d49-9db5-636fdc905cdb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.254985] env[62923]: DEBUG nova.compute.manager [req-308556a1-1825-46ae-bee7-1029a902615d req-d96e99b1-e9e6-4cfd-bc1f-2f9f07d2a087 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Received event network-vif-plugged-9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1089.255278] env[62923]: DEBUG oslo_concurrency.lockutils [req-308556a1-1825-46ae-bee7-1029a902615d req-d96e99b1-e9e6-4cfd-bc1f-2f9f07d2a087 service nova] Acquiring lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.255565] env[62923]: DEBUG oslo_concurrency.lockutils [req-308556a1-1825-46ae-bee7-1029a902615d 
req-d96e99b1-e9e6-4cfd-bc1f-2f9f07d2a087 service nova] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.255779] env[62923]: DEBUG oslo_concurrency.lockutils [req-308556a1-1825-46ae-bee7-1029a902615d req-d96e99b1-e9e6-4cfd-bc1f-2f9f07d2a087 service nova] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.256015] env[62923]: DEBUG nova.compute.manager [req-308556a1-1825-46ae-bee7-1029a902615d req-d96e99b1-e9e6-4cfd-bc1f-2f9f07d2a087 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] No waiting events found dispatching network-vif-plugged-9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1089.256348] env[62923]: WARNING nova.compute.manager [req-308556a1-1825-46ae-bee7-1029a902615d req-d96e99b1-e9e6-4cfd-bc1f-2f9f07d2a087 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Received unexpected event network-vif-plugged-9add9dea-2408-497b-982c-a558a1db59bc for instance with vm_state building and task_state spawning. [ 1089.263262] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5fd37d0-80a0-433b-b152-7106d6aaf151 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.267369] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ec548318-3fa8-4a48-a1f4-d4159a43e412 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b826c4d1-3e31-49da-8e16-8e512599912c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.812s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.277576] env[62923]: DEBUG nova.compute.provider_tree [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.345495] env[62923]: DEBUG nova.network.neutron [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Successfully updated port: 9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1089.545539] env[62923]: DEBUG nova.network.neutron [req-757d6535-580c-43aa-8bc0-ee2a7c218046 req-b3fc43cd-cf30-4537-ba66-d1692528ee8a service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Updated VIF entry in instance network info cache for port 3b76277b-efa1-43eb-908f-60a7e2a9f7ad. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1089.545907] env[62923]: DEBUG nova.network.neutron [req-757d6535-580c-43aa-8bc0-ee2a7c218046 req-b3fc43cd-cf30-4537-ba66-d1692528ee8a service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Updating instance_info_cache with network_info: [{"id": "3b76277b-efa1-43eb-908f-60a7e2a9f7ad", "address": "fa:16:3e:8c:6b:5d", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b76277b-ef", "ovs_interfaceid": "3b76277b-efa1-43eb-908f-60a7e2a9f7ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.656672] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: b145b71c-c56b-4872-bb61-fa3e65fef04f] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1089.780987] env[62923]: DEBUG nova.scheduler.client.report [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1089.848185] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.848352] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.848508] env[62923]: DEBUG nova.network.neutron [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e 
tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1090.048855] env[62923]: DEBUG oslo_concurrency.lockutils [req-757d6535-580c-43aa-8bc0-ee2a7c218046 req-b3fc43cd-cf30-4537-ba66-d1692528ee8a service nova] Releasing lock "refresh_cache-b826c4d1-3e31-49da-8e16-8e512599912c" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.160526] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 75f9473f-ca67-4bb5-8663-0ce3709885e9] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1090.285953] env[62923]: DEBUG oslo_concurrency.lockutils [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.684s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.307309] env[62923]: INFO nova.scheduler.client.report [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Deleted allocations for instance aae1a2a3-57da-4846-8240-ac0626e9ebd8 [ 1090.382505] env[62923]: DEBUG nova.network.neutron [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1090.510888] env[62923]: DEBUG nova.network.neutron [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updating instance_info_cache with network_info: [{"id": "9add9dea-2408-497b-982c-a558a1db59bc", "address": "fa:16:3e:22:3a:73", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9add9dea-24", "ovs_interfaceid": "9add9dea-2408-497b-982c-a558a1db59bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.664656] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 41cc788d-9be8-4959-9cef-d91304f5879d] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1090.815820] env[62923]: DEBUG oslo_concurrency.lockutils [None req-18fdc2b7-681e-4240-bdc2-3b15ca450188 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "aae1a2a3-57da-4846-8240-ac0626e9ebd8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.251s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.013185] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.013528] env[62923]: DEBUG nova.compute.manager [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Instance network_info: |[{"id": "9add9dea-2408-497b-982c-a558a1db59bc", "address": "fa:16:3e:22:3a:73", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9add9dea-24", "ovs_interfaceid": "9add9dea-2408-497b-982c-a558a1db59bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1091.014055] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:3a:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9add9dea-2408-497b-982c-a558a1db59bc', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1091.022252] env[62923]: DEBUG oslo.service.loopingcall [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1091.022541] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1091.022777] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2313d263-dfc8-48f0-ab3c-ba52704f1070 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.046048] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1091.046048] env[62923]: value = "task-1370534" [ 1091.046048] env[62923]: _type = "Task" [ 1091.046048] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.055187] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370534, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.168493] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 906470fc-5fec-4c98-8a38-337361e12bc5] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1091.275916] env[62923]: DEBUG nova.compute.manager [req-bee3a168-1370-4682-a7d8-eae64753aab8 req-2fc2e861-23a8-46ca-a91a-f8096e9cd907 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Received event network-changed-9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1091.276276] env[62923]: DEBUG nova.compute.manager [req-bee3a168-1370-4682-a7d8-eae64753aab8 req-2fc2e861-23a8-46ca-a91a-f8096e9cd907 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Refreshing instance network info cache due to event network-changed-9add9dea-2408-497b-982c-a558a1db59bc. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1091.276470] env[62923]: DEBUG oslo_concurrency.lockutils [req-bee3a168-1370-4682-a7d8-eae64753aab8 req-2fc2e861-23a8-46ca-a91a-f8096e9cd907 service nova] Acquiring lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.276699] env[62923]: DEBUG oslo_concurrency.lockutils [req-bee3a168-1370-4682-a7d8-eae64753aab8 req-2fc2e861-23a8-46ca-a91a-f8096e9cd907 service nova] Acquired lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.277197] env[62923]: DEBUG nova.network.neutron [req-bee3a168-1370-4682-a7d8-eae64753aab8 req-2fc2e861-23a8-46ca-a91a-f8096e9cd907 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Refreshing network info cache for port 9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1091.556458] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370534, 'name': CreateVM_Task, 'duration_secs': 0.395589} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.556458] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1091.556966] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.557951] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.557951] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1091.557951] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b41cb8fe-2127-4423-a3e2-1ccf6031ada5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.562064] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1091.562064] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52bb2d4a-6c89-22ce-c736-4f4282dd4e9f" [ 1091.562064] env[62923]: _type = "Task" [ 1091.562064] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.569828] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52bb2d4a-6c89-22ce-c736-4f4282dd4e9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.671754] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 65000ac5-1c28-4abe-bc96-c440f0b14d3d] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1091.972770] env[62923]: DEBUG nova.network.neutron [req-bee3a168-1370-4682-a7d8-eae64753aab8 req-2fc2e861-23a8-46ca-a91a-f8096e9cd907 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updated VIF entry in instance network info cache for port 9add9dea-2408-497b-982c-a558a1db59bc. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1091.973116] env[62923]: DEBUG nova.network.neutron [req-bee3a168-1370-4682-a7d8-eae64753aab8 req-2fc2e861-23a8-46ca-a91a-f8096e9cd907 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updating instance_info_cache with network_info: [{"id": "9add9dea-2408-497b-982c-a558a1db59bc", "address": "fa:16:3e:22:3a:73", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9add9dea-24", "ovs_interfaceid": "9add9dea-2408-497b-982c-a558a1db59bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.072437] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52bb2d4a-6c89-22ce-c736-4f4282dd4e9f, 'name': SearchDatastore_Task, 'duration_secs': 0.012421} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.072720] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.072962] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1092.073287] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.073443] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.073697] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1092.073890] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e00dd19-8a86-43e9-be28-0d2396daf38e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.083688] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1092.083856] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1092.084598] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e7eac9d-39c7-43c9-9245-3d72dafc2277 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.090431] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1092.090431] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f716f0-2482-601e-fb48-2e856e96d2b7" [ 1092.090431] env[62923]: _type = "Task" [ 1092.090431] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.099476] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f716f0-2482-601e-fb48-2e856e96d2b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.174811] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 3f60e93d-15ae-4fe4-ba86-6b6b123b645c] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1092.478493] env[62923]: DEBUG oslo_concurrency.lockutils [req-bee3a168-1370-4682-a7d8-eae64753aab8 req-2fc2e861-23a8-46ca-a91a-f8096e9cd907 service nova] Releasing lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.521166] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.521405] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.608428] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f716f0-2482-601e-fb48-2e856e96d2b7, 'name': SearchDatastore_Task, 'duration_secs': 0.009362} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.609273] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-437ab122-c26c-42ff-95c8-4ee979f8818d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.614539] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1092.614539] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f7e41b-599d-8c0e-c2fd-1d479073fc67" [ 1092.614539] env[62923]: _type = "Task" [ 1092.614539] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.623181] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f7e41b-599d-8c0e-c2fd-1d479073fc67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.677898] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: f52f5912-d6e8-4da5-ac39-65bb065b6555] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1093.024131] env[62923]: DEBUG nova.compute.manager [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Starting instance... {{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1093.124945] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52f7e41b-599d-8c0e-c2fd-1d479073fc67, 'name': SearchDatastore_Task, 'duration_secs': 0.074015} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.124945] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.125231] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] d4bc3c6c-20ac-4714-8109-867a2f6292b1/d4bc3c6c-20ac-4714-8109-867a2f6292b1.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1093.125447] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d8e5d25-c407-4e92-a39f-ef56850bfc90 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.131870] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1093.131870] env[62923]: value = "task-1370536" [ 1093.131870] env[62923]: _type = "Task" [ 1093.131870] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.138984] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370536, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.180582] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 0a9fdd83-3818-4831-90f9-9d30713961c5] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1093.547058] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.547360] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.549012] env[62923]: INFO nova.compute.claims [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1093.641474] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370536, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.684089] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 3a40ada5-2dfa-4ef1-9e05-98ad3dc34fb7] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1094.142462] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370536, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.187517] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 60805eeb-8287-4064-9bd3-a7c6a21f40b5] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1094.633276] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b1981d-da81-4af1-822b-807dc4fe1651 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.645972] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e86095b-a6d6-48ed-85a1-6c025665654d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.648821] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370536, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.261118} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.649085] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] d4bc3c6c-20ac-4714-8109-867a2f6292b1/d4bc3c6c-20ac-4714-8109-867a2f6292b1.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1094.649303] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1094.649793] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95a48419-06fc-445f-8f54-0563431f5532 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.675979] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53eba5f4-18b9-40ff-ba5c-115e403c6093 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.679462] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1094.679462] env[62923]: value = "task-1370537" [ 1094.679462] env[62923]: _type = "Task" [ 1094.679462] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.684943] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ebaff3-fd5b-478e-ba6e-7e4182877679 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.693235] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: e6752138-5d66-469d-ac56-6bd169ad166e] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1094.695059] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370537, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.702752] env[62923]: DEBUG nova.compute.provider_tree [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1095.188716] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370537, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081499} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.189083] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1095.189764] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e932458-2b99-4aad-b3a9-b7d6becd7b58 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.210522] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] d4bc3c6c-20ac-4714-8109-867a2f6292b1/d4bc3c6c-20ac-4714-8109-867a2f6292b1.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1095.210933] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 8489a1f6-e2cf-4631-8b7d-0b3977b9a8e7] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1095.214804] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32d0c0be-5974-42e1-849b-29e679300680 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.234983] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1095.234983] env[62923]: value = "task-1370539" [ 1095.234983] env[62923]: _type = "Task" [ 1095.234983] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.243248] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370539, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.244806] env[62923]: ERROR nova.scheduler.client.report [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [req-837658c3-dd3a-4bf9-8ea6-b0fa56565122] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. 
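Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-837658c3-dd3a-4bf9-8ea6-b0fa56565122"}]}

This 409 is placement's optimistic concurrency control at work: every inventory write carries the resource provider generation, a write with a stale generation is rejected with code placement.concurrent_update, and the report client then re-reads the provider and retries (the "Refreshing inventories ... Updated inventory ... with generation 145" records that follow). A minimal sketch of that read-modify-retry loop against the placement REST API, with a placeholder endpoint and token:

```python
# Sketch of the optimistic-concurrency retry the scheduler report
# client performs when placement returns 409 placement.concurrent_update.
# PLACEMENT and the token value are placeholders, not real endpoints.
import requests

PLACEMENT = 'http://placement.example.com'
HEADERS = {'x-auth-token': 'TOKEN',
           'openstack-api-version': 'placement 1.26'}

def set_inventory(rp_uuid, inventories, max_attempts=4):
    url = f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories'
    for _ in range(max_attempts):
        # Read the current view, including the provider generation.
        current = requests.get(url, headers=HEADERS).json()
        payload = {'resource_provider_generation':
                       current['resource_provider_generation'],
                   'inventories': inventories}
        resp = requests.put(url, json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation first; refresh and retry, exactly as in the log.
    raise RuntimeError('gave up after repeated generation conflicts')
```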
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-837658c3-dd3a-4bf9-8ea6-b0fa56565122"}]} [ 1095.261463] env[62923]: DEBUG nova.scheduler.client.report [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1095.276411] env[62923]: DEBUG nova.scheduler.client.report [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1095.276700] env[62923]: DEBUG nova.compute.provider_tree [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1095.287841] env[62923]: DEBUG nova.scheduler.client.report [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1095.311382] env[62923]: DEBUG nova.scheduler.client.report [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1095.397636] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9c6237-6571-432e-8054-28163c17b865 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.404870] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-84492593-dd70-462b-9741-e374e05a8802 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.436439] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04b961c-1ea9-42d6-afe9-83ef25553e58 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.443718] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07873362-b9d0-4709-b488-f9a95a4fbb69 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.456787] env[62923]: DEBUG nova.compute.provider_tree [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1095.729367] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 066da19f-daf0-44e3-8ae0-89f0c970cb92] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1095.744873] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370539, 'name': ReconfigVM_Task, 'duration_secs': 0.30783} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.745155] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Reconfigured VM instance instance-0000006e to attach disk [datastore1] d4bc3c6c-20ac-4714-8109-867a2f6292b1/d4bc3c6c-20ac-4714-8109-867a2f6292b1.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1095.745768] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-712e8863-2a35-4a13-9679-f4d46d77de29 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.752402] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1095.752402] env[62923]: value = "task-1370540" [ 1095.752402] env[62923]: _type = "Task" [ 1095.752402] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.761427] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370540, 'name': Rename_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.989140] env[62923]: DEBUG nova.scheduler.client.report [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 145 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1095.989454] env[62923]: DEBUG nova.compute.provider_tree [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 145 to 146 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1095.989642] env[62923]: DEBUG nova.compute.provider_tree [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1096.232774] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: d8bed052-7d83-471f-a18f-67c4c16a1b4a] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1096.263873] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370540, 'name': Rename_Task, 'duration_secs': 0.149306} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.264146] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1096.264392] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02ac66d9-c98d-45b9-9e43-6b53c2784526 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.270118] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1096.270118] env[62923]: value = "task-1370541" [ 1096.270118] env[62923]: _type = "Task" [ 1096.270118] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.277170] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370541, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.495397] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.947s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.495397] env[62923]: DEBUG nova.compute.manager [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1096.736094] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 43065826-0f2b-48dc-bc42-8e0fd84fdcd3] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1096.779879] env[62923]: DEBUG oslo_vmware.api [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370541, 'name': PowerOnVM_Task, 'duration_secs': 0.451782} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.780084] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1096.780385] env[62923]: INFO nova.compute.manager [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Took 7.72 seconds to spawn the instance on the hypervisor. [ 1096.780526] env[62923]: DEBUG nova.compute.manager [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1096.781272] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c78f56e-9b4a-4266-8efa-cde550bba563 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.000982] env[62923]: DEBUG nova.compute.utils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1097.002343] env[62923]: DEBUG nova.compute.manager [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Allocating IP information in the background. 
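{{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}

"Allocating IP information in the background" marks Nova handing the Neutron port allocation to a separate greenthread while it continues with block device mappings; the build only blocks on the network result when it is actually needed for the spawn. A sketch of that overlap, using concurrent.futures rather than Nova's eventlet greenthreads to stay dependency-free; both helper functions are stand-ins, not Nova's real signatures:

```python
# Sketch of the overlap behind "Allocating IP information in the
# background": start network allocation, keep preparing the instance,
# and block only when network_info is required.
from concurrent.futures import ThreadPoolExecutor

def allocate_for_instance(instance_uuid):
    # Placeholder for the Neutron round trips seen in the log:
    # create the port, wait for network-vif-plugged, refresh the cache.
    return [{'id': 'fef6cf51-9164-425e-8951-263bb2427ad4'}]

def build_block_device_mappings(instance_uuid):
    # Placeholder for the BDM work that proceeds in parallel.
    return []

uuid = 'f0b447f4-7d0d-4a91-9e99-bf8fad24b750'
with ThreadPoolExecutor(max_workers=1) as pool:
    nw_future = pool.submit(allocate_for_instance, uuid)
    bdms = build_block_device_mappings(uuid)
    network_info = nw_future.result()  # block only at the spawn step
```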
[ 1097.002512] env[62923]: DEBUG nova.network.neutron [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1097.050347] env[62923]: DEBUG nova.policy [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d2829faa8f74da8a1432abd0c2434f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76d290a91b3b4d9491f755fd3d7e7894', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 1097.239446] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 67a83e64-c8bd-499c-895a-11976d69195b] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1097.302992] env[62923]: INFO nova.compute.manager [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Took 14.27 seconds to build instance. [ 1097.310923] env[62923]: DEBUG nova.network.neutron [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Successfully created port: fef6cf51-9164-425e-8951-263bb2427ad4 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1097.505349] env[62923]: DEBUG nova.compute.manager [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Start building block device mappings for instance. 
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1097.744099] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 94d2670f-d858-437a-a166-d148a57e07ab] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1097.804709] env[62923]: DEBUG oslo_concurrency.lockutils [None req-e0851129-74f5-4a7d-bdf3-3cfa2240096e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.783s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.247515] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 92c59517-7e6f-45bd-8211-789a718d66d1] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1098.516985] env[62923]: DEBUG nova.compute.manager [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1098.543840] env[62923]: DEBUG nova.virt.hardware [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1098.543840] env[62923]: DEBUG nova.virt.hardware [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1098.543840] env[62923]: DEBUG nova.virt.hardware [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1098.543840] env[62923]: DEBUG nova.virt.hardware [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1098.543840] 
env[62923]: DEBUG nova.virt.hardware [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1098.543840] env[62923]: DEBUG nova.virt.hardware [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1098.543840] env[62923]: DEBUG nova.virt.hardware [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1098.543840] env[62923]: DEBUG nova.virt.hardware [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1098.544767] env[62923]: DEBUG nova.virt.hardware [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1098.544767] env[62923]: DEBUG nova.virt.hardware [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1098.544767] env[62923]: DEBUG nova.virt.hardware [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1098.545227] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989884a3-74be-40f4-a730-e4af9566a98f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.554639] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1c6edc-c211-49e3-a5fb-29a43ee49145 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.586769] env[62923]: DEBUG nova.compute.manager [req-6f4d0c79-4254-46ef-9d4b-d06be9ba8c19 req-fb029199-5bd4-4c50-9473-7b8c65e122ad service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Received event network-changed-9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1098.587979] env[62923]: DEBUG nova.compute.manager [req-6f4d0c79-4254-46ef-9d4b-d06be9ba8c19 req-fb029199-5bd4-4c50-9473-7b8c65e122ad service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] 
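Refreshing instance network info cache due to event network-changed-9add9dea-2408-497b-982c-a558a1db59bc. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}

The "Getting desirable topologies ... Build topologies for 1 vcpu(s) 1:1:1 ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" records above are Nova enumerating every sockets*cores*threads factorization of the flavor's vCPU count that fits the flavor and image limits (65536 per dimension here, i.e. effectively unlimited). A toy re-creation of that search, an illustration of the idea rather than Nova's actual implementation:

```python
# Toy version of the topology search the log shows ("Build topologies
# for 1 vcpu(s) ... Got 1 possible topologies").
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology',
                             ['sockets', 'cores', 'threads'])

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield every sockets*cores*threads factorization of vcpus that
    respects the per-dimension limits."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                yield VirtCPUTopology(sockets, cores, threads)

print(list(possible_topologies(1)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching the log
```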
[ 1098.587979] env[62923]: DEBUG oslo_concurrency.lockutils [req-6f4d0c79-4254-46ef-9d4b-d06be9ba8c19 req-fb029199-5bd4-4c50-9473-7b8c65e122ad service nova] Acquiring lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.587979] env[62923]: DEBUG oslo_concurrency.lockutils [req-6f4d0c79-4254-46ef-9d4b-d06be9ba8c19 req-fb029199-5bd4-4c50-9473-7b8c65e122ad service nova] Acquired lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.587979] env[62923]: DEBUG nova.network.neutron [req-6f4d0c79-4254-46ef-9d4b-d06be9ba8c19 req-fb029199-5bd4-4c50-9473-7b8c65e122ad service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Refreshing network info cache for port 9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1098.753455] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 7c3edceb-cc58-4925-a97a-3204936c836d] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1098.810488] env[62923]: DEBUG nova.network.neutron [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Successfully updated port: fef6cf51-9164-425e-8951-263bb2427ad4 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1098.918084] env[62923]: DEBUG nova.compute.manager [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Stashing vm_state: active {{(pid=62923) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1099.257059] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: a616c7f0-8c39-4c08-a1a4-1d89e158d3c5] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1099.312843] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "refresh_cache-f0b447f4-7d0d-4a91-9e99-bf8fad24b750" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.313043] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired lock "refresh_cache-f0b447f4-7d0d-4a91-9e99-bf8fad24b750" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.313252] env[62923]: DEBUG nova.network.neutron [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Building network info 
cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1099.340181] env[62923]: DEBUG nova.network.neutron [req-6f4d0c79-4254-46ef-9d4b-d06be9ba8c19 req-fb029199-5bd4-4c50-9473-7b8c65e122ad service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updated VIF entry in instance network info cache for port 9add9dea-2408-497b-982c-a558a1db59bc. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1099.340662] env[62923]: DEBUG nova.network.neutron [req-6f4d0c79-4254-46ef-9d4b-d06be9ba8c19 req-fb029199-5bd4-4c50-9473-7b8c65e122ad service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updating instance_info_cache with network_info: [{"id": "9add9dea-2408-497b-982c-a558a1db59bc", "address": "fa:16:3e:22:3a:73", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9add9dea-24", "ovs_interfaceid": "9add9dea-2408-497b-982c-a558a1db59bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.441212] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.441562] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.760099] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 6cf594e3-e4a6-45f5-b8d2-06db1c200042] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1099.843038] env[62923]: DEBUG nova.network.neutron [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Instance cache missing network info. 
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1099.845208] env[62923]: DEBUG oslo_concurrency.lockutils [req-6f4d0c79-4254-46ef-9d4b-d06be9ba8c19 req-fb029199-5bd4-4c50-9473-7b8c65e122ad service nova] Releasing lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.946479] env[62923]: INFO nova.compute.claims [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1099.981845] env[62923]: DEBUG nova.network.neutron [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Updating instance_info_cache with network_info: [{"id": "fef6cf51-9164-425e-8951-263bb2427ad4", "address": "fa:16:3e:f0:52:25", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfef6cf51-91", "ovs_interfaceid": "fef6cf51-9164-425e-8951-263bb2427ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.263434] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 1fef5eb2-acb0-4d00-81a3-c270af7df0e8] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1100.454383] env[62923]: INFO nova.compute.resource_tracker [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating resource usage from migration 256675a0-062a-4dba-916b-7351347f01fb [ 1100.484044] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Releasing lock "refresh_cache-f0b447f4-7d0d-4a91-9e99-bf8fad24b750" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.484354] env[62923]: DEBUG nova.compute.manager [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: 
f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Instance network_info: |[{"id": "fef6cf51-9164-425e-8951-263bb2427ad4", "address": "fa:16:3e:f0:52:25", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfef6cf51-91", "ovs_interfaceid": "fef6cf51-9164-425e-8951-263bb2427ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1100.484761] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:52:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a78d5760-0bb1-4476-9578-8ad3c3144439', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fef6cf51-9164-425e-8951-263bb2427ad4', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1100.492240] env[62923]: DEBUG oslo.service.loopingcall [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1100.494385] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1100.494788] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f468d445-1717-4c6e-bd02-636ed589c2c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.515659] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1100.515659] env[62923]: value = "task-1370544" [ 1100.515659] env[62923]: _type = "Task" [ 1100.515659] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.524837] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370544, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.547013] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718622d3-b572-4a9c-9065-d02ef6d6f3b1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.554882] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3903839d-7915-4156-80d1-d6573f1c7f7a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.584200] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b956f076-1a90-4147-9334-c9c5c17056d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.591127] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6da60fe-ec23-46ba-9d5f-1e9470362556 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.604495] env[62923]: DEBUG nova.compute.provider_tree [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.615735] env[62923]: DEBUG nova.compute.manager [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Received event network-vif-plugged-fef6cf51-9164-425e-8951-263bb2427ad4 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1100.615994] env[62923]: DEBUG oslo_concurrency.lockutils [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] Acquiring lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.616240] env[62923]: DEBUG oslo_concurrency.lockutils [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.616471] env[62923]: DEBUG oslo_concurrency.lockutils [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.616654] env[62923]: DEBUG nova.compute.manager [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] No waiting events found dispatching network-vif-plugged-fef6cf51-9164-425e-8951-263bb2427ad4 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1100.616823] env[62923]: 
WARNING nova.compute.manager [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Received unexpected event network-vif-plugged-fef6cf51-9164-425e-8951-263bb2427ad4 for instance with vm_state building and task_state spawning. [ 1100.617079] env[62923]: DEBUG nova.compute.manager [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Received event network-changed-fef6cf51-9164-425e-8951-263bb2427ad4 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1100.617214] env[62923]: DEBUG nova.compute.manager [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Refreshing instance network info cache due to event network-changed-fef6cf51-9164-425e-8951-263bb2427ad4. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1100.617402] env[62923]: DEBUG oslo_concurrency.lockutils [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] Acquiring lock "refresh_cache-f0b447f4-7d0d-4a91-9e99-bf8fad24b750" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.617541] env[62923]: DEBUG oslo_concurrency.lockutils [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] Acquired lock "refresh_cache-f0b447f4-7d0d-4a91-9e99-bf8fad24b750" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.617700] env[62923]: DEBUG nova.network.neutron [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Refreshing network info cache for port fef6cf51-9164-425e-8951-263bb2427ad4 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1100.766379] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 6fa4d8a8-093f-4ae8-9148-f15f5bf98944] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1101.026858] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370544, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.109092] env[62923]: DEBUG nova.scheduler.client.report [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1101.269676] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 81cca322-c1a0-4fbd-8013-0e4a4694ecfd] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1101.326031] env[62923]: DEBUG nova.network.neutron [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Updated VIF entry in instance network info cache for port fef6cf51-9164-425e-8951-263bb2427ad4. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1101.326411] env[62923]: DEBUG nova.network.neutron [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Updating instance_info_cache with network_info: [{"id": "fef6cf51-9164-425e-8951-263bb2427ad4", "address": "fa:16:3e:f0:52:25", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfef6cf51-91", "ovs_interfaceid": "fef6cf51-9164-425e-8951-263bb2427ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.525762] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370544, 'name': CreateVM_Task, 'duration_secs': 0.958301} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.525932] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1101.526544] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1101.526747] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.527084] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1101.527340] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb6cf70a-57fd-40fc-970a-97c33393e926 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.531930] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1101.531930] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52830db9-1228-153d-b027-ea3d178d7d99" [ 1101.531930] env[62923]: _type = "Task" [ 1101.531930] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.539752] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52830db9-1228-153d-b027-ea3d178d7d99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.613298] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.172s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.613521] env[62923]: INFO nova.compute.manager [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Migrating [ 1101.774039] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 98974fb7-049a-4c72-a352-bc0a50d2a879] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1101.830218] env[62923]: DEBUG oslo_concurrency.lockutils [req-fe68cf05-dcc4-4d27-9b52-45071a1a7227 req-8ce918fa-86c7-4df9-b900-26945c3a2d2a service nova] Releasing lock "refresh_cache-f0b447f4-7d0d-4a91-9e99-bf8fad24b750" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.043107] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52830db9-1228-153d-b027-ea3d178d7d99, 'name': SearchDatastore_Task, 'duration_secs': 0.010678} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.043449] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.043685] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1102.043920] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.044082] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.044280] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1102.044535] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64e538af-a5c5-4230-a92b-b2943e722629 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.053267] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1102.053505] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1102.054577] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f03c48e8-cb1b-49dc-9995-3e01cd7347ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.059635] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1102.059635] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c8cdfc-2ac9-85a7-a588-0773c963087e" [ 1102.059635] env[62923]: _type = "Task" [ 1102.059635] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.066764] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c8cdfc-2ac9-85a7-a588-0773c963087e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.128722] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.128949] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.129120] env[62923]: DEBUG nova.network.neutron [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1102.277632] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 7c98c50a-e7c7-4430-b5c6-dec88a78c397] Instance has had 0 of 5 cleanup attempts {{(pid=62923) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1102.569737] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c8cdfc-2ac9-85a7-a588-0773c963087e, 'name': SearchDatastore_Task, 'duration_secs': 0.008199} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.570522] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3394fea1-2184-4da2-b135-c27b48304e2e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.575654] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1102.575654] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525d644b-1785-a24b-3023-864a027d9193" [ 1102.575654] env[62923]: _type = "Task" [ 1102.575654] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.582833] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525d644b-1785-a24b-3023-864a027d9193, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.781644] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.781826] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Cleaning up deleted instances with incomplete migration {{(pid=62923) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1102.842362] env[62923]: DEBUG nova.network.neutron [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance_info_cache with network_info: [{"id": "9f0373d6-6c37-4438-8d48-8aa143026856", "address": "fa:16:3e:5e:2b:a4", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0373d6-6c", "ovs_interfaceid": "9f0373d6-6c37-4438-8d48-8aa143026856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.086043] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]525d644b-1785-a24b-3023-864a027d9193, 'name': SearchDatastore_Task, 'duration_secs': 0.009487} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.086347] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.086606] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] f0b447f4-7d0d-4a91-9e99-bf8fad24b750/f0b447f4-7d0d-4a91-9e99-bf8fad24b750.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1103.086960] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-907a684e-047b-4173-ad5c-be36d11f4431 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.093237] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1103.093237] env[62923]: value = "task-1370547" [ 1103.093237] env[62923]: _type = "Task" [ 1103.093237] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.102523] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370547, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.284845] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.345585] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.606737] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370547, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453324} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.607142] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore1] f0b447f4-7d0d-4a91-9e99-bf8fad24b750/f0b447f4-7d0d-4a91-9e99-bf8fad24b750.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1103.607457] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1103.607800] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5a967e52-506c-49e9-bd16-1b8242769082 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.615517] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1103.615517] env[62923]: value = "task-1370548" [ 1103.615517] env[62923]: _type = "Task" [ 1103.615517] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.626291] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370548, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.126203] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370548, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096063} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.126449] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1104.127259] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57eb09f-f033-4b52-ad7b-fe9a708f37cb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.149731] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] f0b447f4-7d0d-4a91-9e99-bf8fad24b750/f0b447f4-7d0d-4a91-9e99-bf8fad24b750.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1104.150092] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d92babc-1e78-46de-adf8-4ac9ef1ddfb4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.164764] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.165016] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.174343] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1104.174343] env[62923]: value = "task-1370549" [ 1104.174343] env[62923]: _type = "Task" [ 1104.174343] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.183178] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370549, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.670557] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.670992] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.670992] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.670992] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.671174] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.685007] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370549, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.859524] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c8bccd-6e2c-4fb2-a5ce-c8c28661cd7f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.879419] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance '222b944d-c58e-476e-b723-fc2b6990120a' progress to 0 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1105.175293] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Getting list of instances from cluster (obj){ [ 1105.175293] env[62923]: value = "domain-c8" [ 1105.175293] env[62923]: _type = "ClusterComputeResource" [ 1105.175293] env[62923]: } {{(pid=62923) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1105.177203] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe8bdcb-c54d-494a-b232-f0d12118c48d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.189307] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370549, 'name': ReconfigVM_Task, 'duration_secs': 0.620845} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.197158] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Reconfigured VM instance instance-0000006f to attach disk [datastore1] f0b447f4-7d0d-4a91-9e99-bf8fad24b750/f0b447f4-7d0d-4a91-9e99-bf8fad24b750.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1105.197697] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Got total of 6 instances {{(pid=62923) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1105.197856] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Triggering sync for uuid 534fa654-ed73-4518-bdc7-d1f981628fd8 {{(pid=62923) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1105.198054] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Triggering sync for uuid 222b944d-c58e-476e-b723-fc2b6990120a {{(pid=62923) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1105.198218] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Triggering sync for uuid bc86897e-85d6-46ea-bf66-2df7c6ed8fa0 {{(pid=62923) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1105.198368] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Triggering sync for uuid b826c4d1-3e31-49da-8e16-8e512599912c {{(pid=62923) 
_sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1105.198515] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Triggering sync for uuid d4bc3c6c-20ac-4714-8109-867a2f6292b1 {{(pid=62923) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1105.198661] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Triggering sync for uuid f0b447f4-7d0d-4a91-9e99-bf8fad24b750 {{(pid=62923) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1105.198955] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28f5c56e-0919-4764-b5c2-0dddd7a6aa4b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.200758] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "534fa654-ed73-4518-bdc7-d1f981628fd8" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.200855] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "534fa654-ed73-4518-bdc7-d1f981628fd8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.201130] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "222b944d-c58e-476e-b723-fc2b6990120a" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.201318] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "222b944d-c58e-476e-b723-fc2b6990120a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.201482] env[62923]: INFO nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] During sync_power_state the instance has a pending task (resize_migrating). Skip.
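The burst of lockutils records above is Nova's periodic power-state audit serializing itself per instance: each instance UUID becomes a named lock, and instances with a pending task (resize_migrating here, spawning later) are skipped rather than raced. A minimal sketch of that pattern, assuming a plain dict in place of Nova's Instance object (illustrative only, not Nova's actual code):

    from oslo_concurrency import lockutils

    def sync_one(db_instance, driver_power_state):
        # The "Acquiring lock ... by ..." / "acquired" / "released" records
        # above are emitted at DEBUG by this decorator's inner() wrapper.
        @lockutils.synchronized(db_instance['uuid'])
        def query_driver_power_state_and_sync():
            if db_instance.get('task_state') is not None:
                # Pending task (e.g. resize_migrating): skip rather than
                # fight an in-flight operation on the same instance.
                return
            # ... reconcile driver_power_state with the DB record here ...
        query_driver_power_state_and_sync()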
[ 1105.201644] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "222b944d-c58e-476e-b723-fc2b6990120a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.201835] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.202022] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.202258] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "b826c4d1-3e31-49da-8e16-8e512599912c" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.202435] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "b826c4d1-3e31-49da-8e16-8e512599912c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.202662] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.202841] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.203079] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.203271] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.204745] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-e218360f-6297-45be-a011-500d409a1aad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.208429] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2a59fb-9410-48c8-b04c-ba545bf615b5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.210690] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8268c4e2-ff87-49fc-bd36-568b07a587c3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.213826] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89b1214-b7ea-4574-b99a-cb108203586d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.228059] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1105.228059] env[62923]: value = "task-1370551" [ 1105.228059] env[62923]: _type = "Task" [ 1105.228059] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.239707] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370551, 'name': Rename_Task} progress is 10%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.385339] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1105.385982] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82f6f145-3a6f-442e-b5ee-1869e0cb5e42 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.393875] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1105.393875] env[62923]: value = "task-1370552" [ 1105.393875] env[62923]: _type = "Task" [ 1105.393875] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.401780] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370552, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.717812] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.718184] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.718227] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.718378] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62923) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1105.719332] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a1005c-9ec4-4af4-85bb-0d8882aad5a4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.727894] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca24d14c-d00a-455f-b63f-3f6dca4302a3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.732349] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "b826c4d1-3e31-49da-8e16-8e512599912c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.530s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.732674] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.531s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.732958] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "534fa654-ed73-4518-bdc7-d1f981628fd8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.532s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.733343] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.530s {{(pid=62923) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.750331] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8624f4-0366-4b47-8927-fdc485f123a1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.752674] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370551, 'name': Rename_Task, 'duration_secs': 0.143626} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.752931] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.753527] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32fd2882-b363-47b4-868c-872d13c2d8ee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.758309] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef2b806-aab2-458d-80ef-0d7c7f9da308 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.762100] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1105.762100] env[62923]: value = "task-1370553" [ 1105.762100] env[62923]: _type = "Task" [ 1105.762100] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.769544] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370553, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.794131] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180845MB free_disk=147GB free_vcpus=48 pci_devices=None {{(pid=62923) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1105.794312] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.794560] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.903725] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370552, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.273711] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370553, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.405292] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370552, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.774212] env[62923]: DEBUG oslo_vmware.api [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370553, 'name': PowerOnVM_Task, 'duration_secs': 0.650477} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.774579] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1106.774705] env[62923]: INFO nova.compute.manager [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Took 8.26 seconds to spawn the instance on the hypervisor. 
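Every "Task: {...} progress is N%" record in this stretch comes from oslo.vmware's task poller: the driver invokes a vSphere *_Task method through the session, then blocks in wait_for_task(), which re-polls at task_poll_interval and logs progress until the task succeeds or raises. A hedged sketch of that call/poll pattern (placeholder endpoint and credentials; vm_ref is assumed to have been looked up already, and this is not the Nova driver's actual code):

    from oslo_vmware import api as vmware_api

    def power_on(session, vm_ref):
        # Mirrors the PowerOnVM_Task records above: invoke_api() issues the
        # SOAP call; wait_for_task() polls and logs "progress is N%" until
        # the task finishes, returning its result or raising on error.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)

    # Placeholder host/credentials; a real session needs a reachable
    # vCenter, and construction logs into it immediately.
    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)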
[ 1106.774891] env[62923]: DEBUG nova.compute.manager [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1106.775648] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058f78ce-79b9-4ba1-a253-f06c4b71588e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.802291] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Applying migration context for instance 222b944d-c58e-476e-b723-fc2b6990120a as it has an incoming, in-progress migration 256675a0-062a-4dba-916b-7351347f01fb. Migration status is migrating {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1106.803220] env[62923]: INFO nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating resource usage from migration 256675a0-062a-4dba-916b-7351347f01fb [ 1106.821810] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 534fa654-ed73-4518-bdc7-d1f981628fd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.821954] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance bc86897e-85d6-46ea-bf66-2df7c6ed8fa0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.822086] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance b826c4d1-3e31-49da-8e16-8e512599912c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.822204] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance d4bc3c6c-20ac-4714-8109-867a2f6292b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.822318] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance f0b447f4-7d0d-4a91-9e99-bf8fad24b750 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.822432] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Migration 256675a0-062a-4dba-916b-7351347f01fb is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1106.822540] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 222b944d-c58e-476e-b723-fc2b6990120a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1106.822720] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1106.822853] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1920MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1106.906507] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370552, 'name': PowerOffVM_Task, 'duration_secs': 1.039979} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.907560] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1106.907759] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance '222b944d-c58e-476e-b723-fc2b6990120a' progress to 17 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1106.911392] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641ce877-8e94-4dd8-9221-7922a6403e79 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.918527] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530fe6f4-2f4c-4abb-b3ef-d394c7d5b915 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.949965] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d857e9-5136-4073-8430-f9613dbfa9fe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.958314] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d5324f-f672-44c7-a770-b10448ada9ec {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.972070] env[62923]: DEBUG nova.compute.provider_tree [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.293028] env[62923]: INFO nova.compute.manager [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Took 13.76 seconds to build instance. 
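The build of f0b447f4 that finishes here followed the copy-then-extend sequence logged around 1103.0 to 1104.1: the cached image VMDK is copied from devstack-image-cache_base into the instance directory, then the root disk is extended to 1048576 KB (the flavor's 1 GiB root_gb). A hedged sketch of those two VirtualDiskManager calls; session and dc_ref (the Datacenter moref) are assumed to exist, the datastore paths are copied from the log, and this is illustrative rather than the driver's actual code:

    # Both calls go through vCenter's VirtualDiskManager; each returns a
    # Task that is then polled, exactly as in the records above.
    disk_mgr = session.vim.service_content.virtualDiskManager

    src = ('[datastore1] devstack-image-cache_base/'
           'cd84cf13-77b9-4bc1-bb15-31bece605a8e/'
           'cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk')
    dst = ('[datastore1] f0b447f4-7d0d-4a91-9e99-bf8fad24b750/'
           'f0b447f4-7d0d-4a91-9e99-bf8fad24b750.vmdk')

    # CopyVirtualDisk_Task: clone the cached image into the instance dir.
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src, sourceDatacenter=dc_ref,
                              destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(task)

    # ExtendVirtualDisk_Task: grow the root disk to 1048576 KB (1 GiB).
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                              name=dst, datacenter=dc_ref,
                              newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(task)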
[ 1107.418245] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1107.418526] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1107.418688] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1107.418905] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1107.419075] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1107.419217] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1107.419424] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1107.419584] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1107.419751] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 
1107.419916] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1107.420135] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1107.425078] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbbb3837-2fa4-4926-aabd-2aec93e11d94 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.442279] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1107.442279] env[62923]: value = "task-1370554" [ 1107.442279] env[62923]: _type = "Task" [ 1107.442279] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.455483] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370554, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.474574] env[62923]: DEBUG nova.scheduler.client.report [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1107.794650] env[62923]: DEBUG oslo_concurrency.lockutils [None req-1a814181-61a6-4b3f-963c-22765974cc10 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 15.273s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.794985] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 2.592s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.795263] env[62923]: INFO nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] During sync_power_state the instance has a
pending task (spawning). Skip. [ 1107.795333] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.844320] env[62923]: DEBUG nova.compute.manager [req-27e14c15-afd9-4ed9-a1bb-f49dcb4b64c6 req-39411fb1-7b45-4016-bd85-e57aa25922f7 service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Received event network-changed-fef6cf51-9164-425e-8951-263bb2427ad4 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1107.844518] env[62923]: DEBUG nova.compute.manager [req-27e14c15-afd9-4ed9-a1bb-f49dcb4b64c6 req-39411fb1-7b45-4016-bd85-e57aa25922f7 service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Refreshing instance network info cache due to event network-changed-fef6cf51-9164-425e-8951-263bb2427ad4. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1107.844737] env[62923]: DEBUG oslo_concurrency.lockutils [req-27e14c15-afd9-4ed9-a1bb-f49dcb4b64c6 req-39411fb1-7b45-4016-bd85-e57aa25922f7 service nova] Acquiring lock "refresh_cache-f0b447f4-7d0d-4a91-9e99-bf8fad24b750" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.844882] env[62923]: DEBUG oslo_concurrency.lockutils [req-27e14c15-afd9-4ed9-a1bb-f49dcb4b64c6 req-39411fb1-7b45-4016-bd85-e57aa25922f7 service nova] Acquired lock "refresh_cache-f0b447f4-7d0d-4a91-9e99-bf8fad24b750" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.845057] env[62923]: DEBUG nova.network.neutron [req-27e14c15-afd9-4ed9-a1bb-f49dcb4b64c6 req-39411fb1-7b45-4016-bd85-e57aa25922f7 service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Refreshing network info cache for port fef6cf51-9164-425e-8951-263bb2427ad4 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1107.953759] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370554, 'name': ReconfigVM_Task, 'duration_secs': 0.166586} completed successfully.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.954073] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance '222b944d-c58e-476e-b723-fc2b6990120a' progress to 33 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1107.980928] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1107.981077] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.187s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.461021] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1108.462047] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1108.462047] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1108.462047] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1108.462047] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1108.462047] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1108.462301] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1108.462516] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1108.462676] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1108.463171] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1108.463470] env[62923]: DEBUG nova.virt.hardware [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1108.469016] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Reconfiguring VM instance instance-00000069 to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1108.471471] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4923738f-7670-48c3-a992-2ee0774cfb3a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.492651] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1108.492651] env[62923]: value = "task-1370555" [ 1108.492651] env[62923]: _type = "Task" [ 1108.492651] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.500689] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370555, 'name': ReconfigVM_Task} progress is 5%. 
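The hardware.py trace above (Flavor/Image limits 0:0:0, preferred 0:0:0, maximum 65536:65536:65536, then "Got 1 possible topologies") is Nova enumerating every (sockets, cores, threads) factorization of the vCPU count that fits under the limits. A simplified sketch of that enumeration, assuming unset limits default to 65536 as the log shows; Nova's real code in nova/virt/hardware.py layers preferences and sorting on top of this.

# Simplified sketch of the topology enumeration traced above: for a given
# vCPU count, list every (sockets, cores, threads) split that multiplies
# out to the vCPU count and fits under the limits.
from itertools import product


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    for sockets, cores in product(divisors, divisors):
        if vcpus % (sockets * cores):
            continue
        threads = vcpus // (sockets * cores)
        if (sockets <= max_sockets and cores <= max_cores
                and threads <= max_threads):
            yield (sockets, cores, threads)


# For the 1-vCPU m1.nano flavor in the log, the only candidate is 1:1:1.
print(list(possible_topologies(1)))   # [(1, 1, 1)]
print(list(possible_topologies(4)))   # (1, 1, 4), (1, 2, 2), (2, 2, 1), ...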
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.580097] env[62923]: DEBUG nova.network.neutron [req-27e14c15-afd9-4ed9-a1bb-f49dcb4b64c6 req-39411fb1-7b45-4016-bd85-e57aa25922f7 service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Updated VIF entry in instance network info cache for port fef6cf51-9164-425e-8951-263bb2427ad4. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1108.580518] env[62923]: DEBUG nova.network.neutron [req-27e14c15-afd9-4ed9-a1bb-f49dcb4b64c6 req-39411fb1-7b45-4016-bd85-e57aa25922f7 service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Updating instance_info_cache with network_info: [{"id": "fef6cf51-9164-425e-8951-263bb2427ad4", "address": "fa:16:3e:f0:52:25", "network": {"id": "fb946169-638c-440f-b01a-e55616d5fe4d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-412460922-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "76d290a91b3b4d9491f755fd3d7e7894", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a78d5760-0bb1-4476-9578-8ad3c3144439", "external-id": "nsx-vlan-transportzone-325", "segmentation_id": 325, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfef6cf51-91", "ovs_interfaceid": "fef6cf51-9164-425e-8951-263bb2427ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.002878] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370555, 'name': ReconfigVM_Task, 'duration_secs': 0.16618} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.003315] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Reconfigured VM instance instance-00000069 to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1109.003926] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32dc4466-4fae-489c-b815-073dbfbcb0ee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.026527] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 222b944d-c58e-476e-b723-fc2b6990120a/222b944d-c58e-476e-b723-fc2b6990120a.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1109.026863] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5190fcac-bd09-44d7-a5dd-2c9d74e0ef98 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.046270] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1109.046270] env[62923]: value = "task-1370556" [ 1109.046270] env[62923]: _type = "Task" [ 1109.046270] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.064102] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370556, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.084095] env[62923]: DEBUG oslo_concurrency.lockutils [req-27e14c15-afd9-4ed9-a1bb-f49dcb4b64c6 req-39411fb1-7b45-4016-bd85-e57aa25922f7 service nova] Releasing lock "refresh_cache-f0b447f4-7d0d-4a91-9e99-bf8fad24b750" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.267606] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b943c8e1-68e0-4313-bde7-865ba05408b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.267900] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b943c8e1-68e0-4313-bde7-865ba05408b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.556998] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370556, 'name': ReconfigVM_Task, 'duration_secs': 0.318179} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.557256] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 222b944d-c58e-476e-b723-fc2b6990120a/222b944d-c58e-476e-b723-fc2b6990120a.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1109.557526] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance '222b944d-c58e-476e-b723-fc2b6990120a' progress to 50 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1109.770075] env[62923]: DEBUG nova.compute.manager [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1110.066386] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0b7d17-669d-4140-b90b-51f1999291e2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.085202] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9ff15a-7ace-4bf2-934f-eae9f1aa69e6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.102630] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance '222b944d-c58e-476e-b723-fc2b6990120a' progress to 67 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1110.293132] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.293402] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.295363] env[62923]: INFO nova.compute.claims [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1110.641935] env[62923]: DEBUG nova.network.neutron [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Port 9f0373d6-6c37-4438-8d48-8aa143026856 binding to destination host cpu-1 is already ACTIVE {{(pid=62923) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1111.397562] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9808a356-d768-4a09-9225-189afde00764 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.405950] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42ef4f1-3877-40d7-9910-a525df97c5da {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.437838] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761ef421-ad32-4d09-a576-de620e58f4d6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.445667] env[62923]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33550c5b-f8a9-4fa4-9e11-56d13eeb5851 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.459256] env[62923]: DEBUG nova.compute.provider_tree [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1111.663668] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "222b944d-c58e-476e-b723-fc2b6990120a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.663758] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "222b944d-c58e-476e-b723-fc2b6990120a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.663910] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "222b944d-c58e-476e-b723-fc2b6990120a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.978602] env[62923]: ERROR nova.scheduler.client.report [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [req-81882bd2-45ec-4cda-82c5-28cfb272b889] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-81882bd2-45ec-4cda-82c5-28cfb272b889"}]} [ 1111.994394] env[62923]: DEBUG nova.scheduler.client.report [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1112.008093] env[62923]: DEBUG nova.scheduler.client.report [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1112.008350] env[62923]: DEBUG nova.compute.provider_tree [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1112.019459] env[62923]: DEBUG nova.scheduler.client.report [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1112.037176] env[62923]: DEBUG nova.scheduler.client.report [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1112.132774] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee27e81a-9cf6-4d96-9468-370fb7d685f1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.142040] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab4da47-0850-41ba-b231-d06bf2ae64c7 
{{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.178914] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c016afbc-7945-4404-bbd5-56c96a7448d7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.187826] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d1f588-ecbd-4546-87a9-f0d1e1c2e063 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.202019] env[62923]: DEBUG nova.compute.provider_tree [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1112.712923] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1112.713179] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.713308] env[62923]: DEBUG nova.network.neutron [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1112.731713] env[62923]: DEBUG nova.scheduler.client.report [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 149 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1112.732056] env[62923]: DEBUG nova.compute.provider_tree [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating resource 
provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 149 to 150 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1112.732374] env[62923]: DEBUG nova.compute.provider_tree [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1113.237903] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.944s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1113.238331] env[62923]: DEBUG nova.compute.manager [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1113.432297] env[62923]: DEBUG nova.network.neutron [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance_info_cache with network_info: [{"id": "9f0373d6-6c37-4438-8d48-8aa143026856", "address": "fa:16:3e:5e:2b:a4", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0373d6-6c", "ovs_interfaceid": "9f0373d6-6c37-4438-8d48-8aa143026856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.744049] env[62923]: DEBUG nova.compute.utils [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 
tempest-ServerActionsTestOtherA-777156528-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1113.744960] env[62923]: DEBUG nova.compute.manager [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1113.745155] env[62923]: DEBUG nova.network.neutron [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1113.814409] env[62923]: DEBUG nova.policy [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '676a737149a9418498a55f83760df073', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d1cf5e642524949a8366bf54d00593e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 1113.935393] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.089837] env[62923]: DEBUG nova.network.neutron [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Successfully created port: 9ee90698-8589-4858-8ef7-47e64099ac79 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1114.248838] env[62923]: DEBUG nova.compute.manager [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Start building block device mappings for instance. 
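The placement exchange above (the 409 "resource provider generation conflict" at 1111.978602, the refresh of inventories, aggregates and traits, and the successful update that moves the provider generation from 149 to 150) is optimistic concurrency control: every inventory PUT carries the provider generation the client last saw, and a mismatch forces a refresh-and-retry. A self-contained sketch of that loop follows, with FakePlacement standing in for the real service.

# Sketch of the generation-guarded retry visible above: the report client
# PUTs inventory with the generation it last saw, gets a conflict because
# another thread bumped the generation, refreshes its view, and retries.
class ConflictError(Exception):
    pass


class FakePlacement:
    def __init__(self):
        self.generation = 149
        self.inventory = {}

    def put_inventory(self, inventory, generation):
        if generation != self.generation:   # optimistic concurrency check
            raise ConflictError("resource provider generation conflict")
        self.inventory = inventory
        self.generation += 1                # e.g. 149 -> 150 in the log
        return self.generation


def set_inventory_with_retry(placement, inventory, cached_generation,
                             max_attempts=3):
    for _ in range(max_attempts):
        try:
            return placement.put_inventory(inventory, cached_generation)
        except ConflictError:
            # Refresh the cached generation (the real client also refreshes
            # inventories, aggregates and traits) and try again.
            cached_generation = placement.generation
    raise RuntimeError("still conflicting after retries")


server = FakePlacement()
stale = 148  # stale view -> first PUT conflicts, the retry succeeds
print(set_inventory_with_retry(server, {"DISK_GB": {"total": 400}}, stale))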
{{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1114.467211] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff13f4c0-079d-4660-ada3-24711554f831 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.500438] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f804b55-cbc2-498b-8b3a-63054b39545d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.510817] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance '222b944d-c58e-476e-b723-fc2b6990120a' progress to 83 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1114.753881] env[62923]: INFO nova.virt.block_device [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Booting with volume 2bdd0528-e981-4226-a215-481044e8f3b2 at /dev/sda [ 1114.791231] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0968a23a-85d0-40a3-83dd-b79c0399f011 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.803804] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc32bc6-eb14-4e5d-b407-d75000ffa90b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.841926] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9282369-48fa-42b1-9353-2bc616419f6f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.851368] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2947cf2-4032-439d-b1a6-725f83d8768b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.880474] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63c3be8-adf3-484c-ad3c-c6cbadd33725 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.887111] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1bda264-3cd3-4c06-865e-763058a45d60 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.901163] env[62923]: DEBUG nova.virt.block_device [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating existing volume attachment record: def53c00-1096-4f48-ac89-088db369986c {{(pid=62923) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1115.019320] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 
tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1115.019612] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2909908f-81bd-4398-a54b-7b4d6a74aad8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.028186] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1115.028186] env[62923]: value = "task-1370557" [ 1115.028186] env[62923]: _type = "Task" [ 1115.028186] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.036562] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370557, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.474568] env[62923]: DEBUG nova.compute.manager [req-c86780a4-b7b4-46ff-9655-d0b19a1c92c7 req-76d86ad4-ad52-4831-a2c3-9fad1a497aea service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Received event network-vif-plugged-9ee90698-8589-4858-8ef7-47e64099ac79 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1115.474890] env[62923]: DEBUG oslo_concurrency.lockutils [req-c86780a4-b7b4-46ff-9655-d0b19a1c92c7 req-76d86ad4-ad52-4831-a2c3-9fad1a497aea service nova] Acquiring lock "b943c8e1-68e0-4313-bde7-865ba05408b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.475032] env[62923]: DEBUG oslo_concurrency.lockutils [req-c86780a4-b7b4-46ff-9655-d0b19a1c92c7 req-76d86ad4-ad52-4831-a2c3-9fad1a497aea service nova] Lock "b943c8e1-68e0-4313-bde7-865ba05408b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.475171] env[62923]: DEBUG oslo_concurrency.lockutils [req-c86780a4-b7b4-46ff-9655-d0b19a1c92c7 req-76d86ad4-ad52-4831-a2c3-9fad1a497aea service nova] Lock "b943c8e1-68e0-4313-bde7-865ba05408b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.475329] env[62923]: DEBUG nova.compute.manager [req-c86780a4-b7b4-46ff-9655-d0b19a1c92c7 req-76d86ad4-ad52-4831-a2c3-9fad1a497aea service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] No waiting events found dispatching network-vif-plugged-9ee90698-8589-4858-8ef7-47e64099ac79 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1115.475509] env[62923]: WARNING nova.compute.manager [req-c86780a4-b7b4-46ff-9655-d0b19a1c92c7 req-76d86ad4-ad52-4831-a2c3-9fad1a497aea service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Received unexpected event network-vif-plugged-9ee90698-8589-4858-8ef7-47e64099ac79 for instance with 
vm_state building and task_state block_device_mapping. [ 1115.538402] env[62923]: DEBUG oslo_vmware.api [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370557, 'name': PowerOnVM_Task, 'duration_secs': 0.38} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.538670] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1115.538856] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2e82e79a-bb39-40f1-8eda-044359970b1c tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance '222b944d-c58e-476e-b723-fc2b6990120a' progress to 100 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1115.564509] env[62923]: DEBUG nova.network.neutron [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Successfully updated port: 9ee90698-8589-4858-8ef7-47e64099ac79 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1116.067854] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1116.068145] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.068463] env[62923]: DEBUG nova.network.neutron [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1116.601541] env[62923]: DEBUG nova.network.neutron [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Instance cache missing network info. 
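The WARNING above ("Received unexpected event network-vif-plugged-... for instance with vm_state building and task_state block_device_mapping") comes from the per-instance event latch the compute manager keeps: Neutron's notification arrived before spawn registered a waiter, so there was nothing to pop and the event is logged and dropped. A simplified stand-in for that dispatch, assuming a plain dict of threading.Event latches; Nova's real InstanceEvents class is more involved.

# Sketch of the event dispatch above: a latch per expected (instance, event)
# pair; an event that arrives with no registered waiter is "unexpected".
import threading


class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event) -> threading.Event

    def prepare(self, instance_uuid, event):
        latch = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event)] = latch
        return latch

    def pop(self, instance_uuid, event):
        with self._lock:
            return self._waiters.pop((instance_uuid, event), None)


events = InstanceEvents()
uuid = "b943c8e1-68e0-4313-bde7-865ba05408b9"

# Event arrives before spawn registered a waiter -> warn and drop, as above.
latch = events.pop(uuid, "network-vif-plugged-9ee90698")
if latch is None:
    print(f"WARNING: received unexpected event for {uuid}")
else:
    latch.set()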
{{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1116.734308] env[62923]: DEBUG nova.network.neutron [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance_info_cache with network_info: [{"id": "9ee90698-8589-4858-8ef7-47e64099ac79", "address": "fa:16:3e:68:8a:2b", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ee90698-85", "ovs_interfaceid": "9ee90698-8589-4858-8ef7-47e64099ac79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.983950] env[62923]: DEBUG nova.compute.manager [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Start spawning the instance on the hypervisor. 
{{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1116.984523] env[62923]: DEBUG nova.virt.hardware [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1116.984761] env[62923]: DEBUG nova.virt.hardware [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1116.984988] env[62923]: DEBUG nova.virt.hardware [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1116.985116] env[62923]: DEBUG nova.virt.hardware [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1116.985265] env[62923]: DEBUG nova.virt.hardware [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1116.985414] env[62923]: DEBUG nova.virt.hardware [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1116.985632] env[62923]: DEBUG nova.virt.hardware [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1116.985798] env[62923]: DEBUG nova.virt.hardware [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1116.985975] env[62923]: DEBUG nova.virt.hardware [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Got 1 possible topologies {{(pid=62923) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1116.986147] env[62923]: DEBUG nova.virt.hardware [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1116.986334] env[62923]: DEBUG nova.virt.hardware [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1116.987342] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e49047-1355-4bd3-9881-f22d019f93c7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.996542] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5069f2d-3e36-4bea-a338-8434d5a09933 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.237155] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1117.237513] env[62923]: DEBUG nova.compute.manager [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Instance network_info: |[{"id": "9ee90698-8589-4858-8ef7-47e64099ac79", "address": "fa:16:3e:68:8a:2b", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ee90698-85", "ovs_interfaceid": "9ee90698-8589-4858-8ef7-47e64099ac79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1117.237929] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:68:8a:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e41070eb-3ac1-4ca9-a3d0-fd65893a97de', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ee90698-8589-4858-8ef7-47e64099ac79', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1117.245256] env[62923]: DEBUG oslo.service.loopingcall [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1117.245479] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1117.245697] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7e1c96c-ca53-4dd6-9567-a2966b93128d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.266558] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1117.266558] env[62923]: value = "task-1370558" [ 1117.266558] env[62923]: _type = "Task" [ 1117.266558] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.280180] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370558, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.501076] env[62923]: DEBUG nova.compute.manager [req-614bbe47-5c07-4bb2-ab52-6d2e29579f5d req-de2dec0b-ee94-4618-a229-8c9063606426 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Received event network-changed-9ee90698-8589-4858-8ef7-47e64099ac79 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1117.501328] env[62923]: DEBUG nova.compute.manager [req-614bbe47-5c07-4bb2-ab52-6d2e29579f5d req-de2dec0b-ee94-4618-a229-8c9063606426 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Refreshing instance network info cache due to event network-changed-9ee90698-8589-4858-8ef7-47e64099ac79. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1117.501581] env[62923]: DEBUG oslo_concurrency.lockutils [req-614bbe47-5c07-4bb2-ab52-6d2e29579f5d req-de2dec0b-ee94-4618-a229-8c9063606426 service nova] Acquiring lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1117.501762] env[62923]: DEBUG oslo_concurrency.lockutils [req-614bbe47-5c07-4bb2-ab52-6d2e29579f5d req-de2dec0b-ee94-4618-a229-8c9063606426 service nova] Acquired lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.501947] env[62923]: DEBUG nova.network.neutron [req-614bbe47-5c07-4bb2-ab52-6d2e29579f5d req-de2dec0b-ee94-4618-a229-8c9063606426 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Refreshing network info cache for port 9ee90698-8589-4858-8ef7-47e64099ac79 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1117.777681] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370558, 'name': CreateVM_Task, 'duration_secs': 0.399046} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.777869] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1117.778583] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291548', 'volume_id': '2bdd0528-e981-4226-a215-481044e8f3b2', 'name': 'volume-2bdd0528-e981-4226-a215-481044e8f3b2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b943c8e1-68e0-4313-bde7-865ba05408b9', 'attached_at': '', 'detached_at': '', 'volume_id': '2bdd0528-e981-4226-a215-481044e8f3b2', 'serial': '2bdd0528-e981-4226-a215-481044e8f3b2'}, 'mount_device': '/dev/sda', 'attachment_id': 'def53c00-1096-4f48-ac89-088db369986c', 'guest_format': None, 'disk_bus': None, 'delete_on_termination': True, 'boot_index': 0, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=62923) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1117.778795] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Root volume attach. 
Driver type: vmdk {{(pid=62923) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1117.779626] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0701c0c6-18f7-4850-8448-d661d7897d64 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.789178] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677a3e47-1fc3-4823-8755-4d4f59e2f47c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.795245] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd39c0c-d66d-47a5-9c96-894b637ecff9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.802259] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-8f31aba3-c046-4914-ae01-36197a0f5310 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.813467] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1117.813467] env[62923]: value = "task-1370559" [ 1117.813467] env[62923]: _type = "Task" [ 1117.813467] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.822928] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370559, 'name': RelocateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.232492] env[62923]: DEBUG nova.network.neutron [req-614bbe47-5c07-4bb2-ab52-6d2e29579f5d req-de2dec0b-ee94-4618-a229-8c9063606426 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updated VIF entry in instance network info cache for port 9ee90698-8589-4858-8ef7-47e64099ac79. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1118.232968] env[62923]: DEBUG nova.network.neutron [req-614bbe47-5c07-4bb2-ab52-6d2e29579f5d req-de2dec0b-ee94-4618-a229-8c9063606426 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance_info_cache with network_info: [{"id": "9ee90698-8589-4858-8ef7-47e64099ac79", "address": "fa:16:3e:68:8a:2b", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ee90698-85", "ovs_interfaceid": "9ee90698-8589-4858-8ef7-47e64099ac79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.324527] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370559, 'name': RelocateVM_Task} progress is 38%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.411680] env[62923]: DEBUG nova.network.neutron [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Port 9f0373d6-6c37-4438-8d48-8aa143026856 binding to destination host cpu-1 is already ACTIVE {{(pid=62923) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1118.412091] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1118.412323] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.412560] env[62923]: DEBUG nova.network.neutron [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1118.736531] env[62923]: DEBUG oslo_concurrency.lockutils [req-614bbe47-5c07-4bb2-ab52-6d2e29579f5d req-de2dec0b-ee94-4618-a229-8c9063606426 service nova] Releasing lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1118.825143] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370559, 'name': RelocateVM_Task} progress is 53%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.145030] env[62923]: DEBUG nova.network.neutron [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance_info_cache with network_info: [{"id": "9f0373d6-6c37-4438-8d48-8aa143026856", "address": "fa:16:3e:5e:2b:a4", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0373d6-6c", "ovs_interfaceid": "9f0373d6-6c37-4438-8d48-8aa143026856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.274993] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.275481] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.326750] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370559, 'name': RelocateVM_Task} progress is 67%.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.648655] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.779156] env[62923]: DEBUG nova.compute.utils [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1119.829048] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370559, 'name': RelocateVM_Task} progress is 82%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.152511] env[62923]: DEBUG nova.compute.manager [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62923) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:897}} [ 1120.152740] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.153014] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.282022] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.007s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.326211] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370559, 'name': RelocateVM_Task} progress is 97%.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.655925] env[62923]: DEBUG nova.objects.instance [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lazy-loading 'migration_context' on Instance uuid 222b944d-c58e-476e-b723-fc2b6990120a {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1120.827909] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370559, 'name': RelocateVM_Task} progress is 97%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.271028] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258a12e1-363a-4b53-bd40-05f336af1487 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.278885] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2d5dd4-84e2-477e-9321-225f519af611 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.309239] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4a70dc-90f2-4aa3-83b1-a324bd6f3217 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.317103] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a18cde4-d345-419f-a4d2-937d7046be88 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.334358] env[62923]: DEBUG nova.compute.provider_tree [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1121.338505] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370559, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.360565] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.360784] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.361009] env[62923]: INFO nova.compute.manager [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Attaching volume 0b89c802-11be-4286-bf70-8fea6df7f9c7 to /dev/sdb [ 1121.392047] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1364c0f4-28ae-45d3-989b-3ed7740ca319 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.399674] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49d4cb0-2805-499f-9136-a8f11f9b063b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.414441] env[62923]: DEBUG nova.virt.block_device [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Updating existing volume attachment record: be2d6f3b-d020-42f0-995f-a6f18bcf907c {{(pid=62923) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1121.831363] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370559, 'name': RelocateVM_Task} progress is 98%.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.840475] env[62923]: DEBUG nova.scheduler.client.report [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1122.332475] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370559, 'name': RelocateVM_Task, 'duration_secs': 4.207572} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.332761] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Volume attach. Driver type: vmdk {{(pid=62923) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1122.332967] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291548', 'volume_id': '2bdd0528-e981-4226-a215-481044e8f3b2', 'name': 'volume-2bdd0528-e981-4226-a215-481044e8f3b2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b943c8e1-68e0-4313-bde7-865ba05408b9', 'attached_at': '', 'detached_at': '', 'volume_id': '2bdd0528-e981-4226-a215-481044e8f3b2', 'serial': '2bdd0528-e981-4226-a215-481044e8f3b2'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1122.333740] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a93a9cf-e82f-4ae5-93c1-4fb011f51a50 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.352875] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d238706b-e37c-417c-9503-48436f41c48d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.375818] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] volume-2bdd0528-e981-4226-a215-481044e8f3b2/volume-2bdd0528-e981-4226-a215-481044e8f3b2.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1122.376882] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2d3d129-dc6e-42da-b7b3-cb1933dee16c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.398031] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1122.398031] env[62923]: value = "task-1370561" [ 1122.398031] env[62923]: _type = "Task" [ 1122.398031] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.406466] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370561, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.858416] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.705s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.908293] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370561, 'name': ReconfigVM_Task, 'duration_secs': 0.424255} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.908561] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Reconfigured VM instance instance-00000070 to attach disk [datastore2] volume-2bdd0528-e981-4226-a215-481044e8f3b2/volume-2bdd0528-e981-4226-a215-481044e8f3b2.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1122.913309] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d8cd6e7-e6b0-4de8-9f04-852c1cfd3a71 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.930230] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1122.930230] env[62923]: value = "task-1370562" [ 1122.930230] env[62923]: _type = "Task" [ 1122.930230] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.938568] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370562, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.439972] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370562, 'name': ReconfigVM_Task, 'duration_secs': 0.241748} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.440315] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291548', 'volume_id': '2bdd0528-e981-4226-a215-481044e8f3b2', 'name': 'volume-2bdd0528-e981-4226-a215-481044e8f3b2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b943c8e1-68e0-4313-bde7-865ba05408b9', 'attached_at': '', 'detached_at': '', 'volume_id': '2bdd0528-e981-4226-a215-481044e8f3b2', 'serial': '2bdd0528-e981-4226-a215-481044e8f3b2'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1123.440839] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa72767b-7a00-40e0-b97a-4c2373567cb2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.448055] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1123.448055] env[62923]: value = "task-1370563" [ 1123.448055] env[62923]: _type = "Task" [ 1123.448055] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.455614] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370563, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.957903] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370563, 'name': Rename_Task, 'duration_secs': 0.137066} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.958230] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1123.958477] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c871206b-36ea-41dd-9062-baab613e6130 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.965622] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1123.965622] env[62923]: value = "task-1370565" [ 1123.965622] env[62923]: _type = "Task" [ 1123.965622] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.973912] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370565, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.394290] env[62923]: INFO nova.compute.manager [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Swapping old allocation on dict_keys(['a513b783-544c-421b-85ec-cfd6d6ee698d']) held by migration 256675a0-062a-4dba-916b-7351347f01fb for instance [ 1124.416584] env[62923]: DEBUG nova.scheduler.client.report [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Overwriting current allocation {'allocations': {'a513b783-544c-421b-85ec-cfd6d6ee698d': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 150}}, 'project_id': '011a5ec25af44f92961be00f82c10c08', 'user_id': '182e1b6f26ed401da24d07a85f993802', 'consumer_generation': 1} on consumer 222b944d-c58e-476e-b723-fc2b6990120a {{(pid=62923) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1124.475634] env[62923]: DEBUG oslo_vmware.api [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370565, 'name': PowerOnVM_Task, 'duration_secs': 0.426754} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.475900] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.476117] env[62923]: INFO nova.compute.manager [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Took 7.49 seconds to spawn the instance on the hypervisor. [ 1124.476300] env[62923]: DEBUG nova.compute.manager [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1124.477110] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142554b1-6027-4e0d-a511-e25449eee0ac {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.525070] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1124.525269] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.525445] env[62923]: DEBUG nova.network.neutron [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1124.995876] env[62923]: INFO nova.compute.manager [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Took 14.72 seconds to build instance. 
[ 1125.265693] env[62923]: DEBUG nova.network.neutron [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance_info_cache with network_info: [{"id": "9f0373d6-6c37-4438-8d48-8aa143026856", "address": "fa:16:3e:5e:2b:a4", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0373d6-6c", "ovs_interfaceid": "9f0373d6-6c37-4438-8d48-8aa143026856", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.498164] env[62923]: DEBUG oslo_concurrency.lockutils [None req-2911af93-ff9a-445c-8b79-b2939403d39b tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b943c8e1-68e0-4313-bde7-865ba05408b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 16.230s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.769029] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-222b944d-c58e-476e-b723-fc2b6990120a" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1125.769029] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1125.769229] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37e1698f-dba3-4dce-9787-12c3b71ea10e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.779898] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1125.779898] env[62923]: value = "task-1370566" [ 1125.779898] env[62923]: _type = "Task" [ 1125.779898] env[62923]: } to complete.
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.793065] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370566, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.958497] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Volume attach. Driver type: vmdk {{(pid=62923) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1125.958855] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291551', 'volume_id': '0b89c802-11be-4286-bf70-8fea6df7f9c7', 'name': 'volume-0b89c802-11be-4286-bf70-8fea6df7f9c7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc86897e-85d6-46ea-bf66-2df7c6ed8fa0', 'attached_at': '', 'detached_at': '', 'volume_id': '0b89c802-11be-4286-bf70-8fea6df7f9c7', 'serial': '0b89c802-11be-4286-bf70-8fea6df7f9c7'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1125.959761] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1e1f84-28e8-49c4-a867-555f89f5f4df {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.978715] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3154458d-fe7f-414d-8ec3-05b03cf37a0f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.005868] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] volume-0b89c802-11be-4286-bf70-8fea6df7f9c7/volume-0b89c802-11be-4286-bf70-8fea6df7f9c7.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1126.006316] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f2b0f56-ed80-4ddb-aebb-c07f5a7092c5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.029243] env[62923]: DEBUG oslo_vmware.api [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1126.029243] env[62923]: value = "task-1370567" [ 1126.029243] env[62923]: _type = "Task" [ 1126.029243] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.046108] env[62923]: DEBUG oslo_vmware.api [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370567, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.098304] env[62923]: DEBUG nova.compute.manager [req-1e4b1a51-c35f-429f-9a0c-c637ff78a2ec req-2b463a44-e52a-44dd-b564-92124254a41f service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Received event network-changed-e5f5c80e-b51d-4788-a346-d4ff5982fa57 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1126.098432] env[62923]: DEBUG nova.compute.manager [req-1e4b1a51-c35f-429f-9a0c-c637ff78a2ec req-2b463a44-e52a-44dd-b564-92124254a41f service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Refreshing instance network info cache due to event network-changed-e5f5c80e-b51d-4788-a346-d4ff5982fa57. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1126.098776] env[62923]: DEBUG oslo_concurrency.lockutils [req-1e4b1a51-c35f-429f-9a0c-c637ff78a2ec req-2b463a44-e52a-44dd-b564-92124254a41f service nova] Acquiring lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.098776] env[62923]: DEBUG oslo_concurrency.lockutils [req-1e4b1a51-c35f-429f-9a0c-c637ff78a2ec req-2b463a44-e52a-44dd-b564-92124254a41f service nova] Acquired lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.098942] env[62923]: DEBUG nova.network.neutron [req-1e4b1a51-c35f-429f-9a0c-c637ff78a2ec req-2b463a44-e52a-44dd-b564-92124254a41f service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Refreshing network info cache for port e5f5c80e-b51d-4788-a346-d4ff5982fa57 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1126.291014] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370566, 'name': PowerOffVM_Task, 'duration_secs': 0.236881} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.291420] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1126.292202] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1126.292473] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1126.292690] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1126.292935] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1126.293158] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1126.293373] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1126.293666] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1126.293897] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 
tempest-ServerActionsTestJSON-1144686189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1126.294138] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1126.294368] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1126.294602] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1126.299863] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f605066e-ed94-4d67-bef6-ae25bb2c077e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.318113] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1126.318113] env[62923]: value = "task-1370568" [ 1126.318113] env[62923]: _type = "Task" [ 1126.318113] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.327057] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370568, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.540341] env[62923]: DEBUG oslo_vmware.api [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370567, 'name': ReconfigVM_Task, 'duration_secs': 0.389945} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.540647] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Reconfigured VM instance instance-0000006c to attach disk [datastore2] volume-0b89c802-11be-4286-bf70-8fea6df7f9c7/volume-0b89c802-11be-4286-bf70-8fea6df7f9c7.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1126.545460] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af44cbcc-a9c0-4d8a-b97e-5c54e9521bb8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.561368] env[62923]: DEBUG oslo_vmware.api [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1126.561368] env[62923]: value = "task-1370569" [ 1126.561368] env[62923]: _type = "Task" [ 1126.561368] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.569648] env[62923]: DEBUG oslo_vmware.api [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370569, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.828013] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370568, 'name': ReconfigVM_Task, 'duration_secs': 0.186635} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.828898] env[62923]: DEBUG nova.network.neutron [req-1e4b1a51-c35f-429f-9a0c-c637ff78a2ec req-2b463a44-e52a-44dd-b564-92124254a41f service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Updated VIF entry in instance network info cache for port e5f5c80e-b51d-4788-a346-d4ff5982fa57. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1126.829274] env[62923]: DEBUG nova.network.neutron [req-1e4b1a51-c35f-429f-9a0c-c637ff78a2ec req-2b463a44-e52a-44dd-b564-92124254a41f service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Updating instance_info_cache with network_info: [{"id": "e5f5c80e-b51d-4788-a346-d4ff5982fa57", "address": "fa:16:3e:c8:44:3f", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5f5c80e-b5", "ovs_interfaceid": "e5f5c80e-b51d-4788-a346-d4ff5982fa57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.831009] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81bbc5fa-17fb-40c2-b42c-5ef72e84894d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.851717] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1126.851950] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1126.852123] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1126.852304] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 
tempest-ServerActionsTestJSON-1144686189-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1126.852451] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1126.852594] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1126.852810] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1126.852996] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1126.853186] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1126.853353] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1126.853525] env[62923]: DEBUG nova.virt.hardware [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1126.854878] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27331b5a-c88d-4167-b321-d97629a10b32 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.861380] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1126.861380] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e2eace-0d56-4c3c-1ef2-2424e7b062b3" [ 1126.861380] env[62923]: _type = "Task" [ 1126.861380] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.870438] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e2eace-0d56-4c3c-1ef2-2424e7b062b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.073018] env[62923]: DEBUG oslo_vmware.api [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370569, 'name': ReconfigVM_Task, 'duration_secs': 0.141371} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.073354] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291551', 'volume_id': '0b89c802-11be-4286-bf70-8fea6df7f9c7', 'name': 'volume-0b89c802-11be-4286-bf70-8fea6df7f9c7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc86897e-85d6-46ea-bf66-2df7c6ed8fa0', 'attached_at': '', 'detached_at': '', 'volume_id': '0b89c802-11be-4286-bf70-8fea6df7f9c7', 'serial': '0b89c802-11be-4286-bf70-8fea6df7f9c7'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1127.334734] env[62923]: DEBUG oslo_concurrency.lockutils [req-1e4b1a51-c35f-429f-9a0c-c637ff78a2ec req-2b463a44-e52a-44dd-b564-92124254a41f service nova] Releasing lock "refresh_cache-534fa654-ed73-4518-bdc7-d1f981628fd8" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1127.371736] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e2eace-0d56-4c3c-1ef2-2424e7b062b3, 'name': SearchDatastore_Task, 'duration_secs': 0.008834} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.377089] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Reconfiguring VM instance instance-00000069 to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1127.377379] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edc42a27-ef9c-4abc-83bc-d3abcde4a341 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.395806] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1127.395806] env[62923]: value = "task-1370570" [ 1127.395806] env[62923]: _type = "Task" [ 1127.395806] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.404358] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370570, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.525163] env[62923]: DEBUG nova.compute.manager [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Stashing vm_state: active {{(pid=62923) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1127.906313] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370570, 'name': ReconfigVM_Task, 'duration_secs': 0.21199} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.906588] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Reconfigured VM instance instance-00000069 to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1127.907450] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985f013e-8fb5-45d0-8e04-0f9b9d65cb2c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.929137] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 222b944d-c58e-476e-b723-fc2b6990120a/222b944d-c58e-476e-b723-fc2b6990120a.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1127.929400] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6503eea-a063-4fc3-9cc4-914ebcf1371f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.950638] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1127.950638] env[62923]: value = "task-1370571" [ 1127.950638] env[62923]: _type = "Task" [ 1127.950638] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.959385] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370571, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.046398] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.046398] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.109529] env[62923]: DEBUG nova.objects.instance [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'flavor' on Instance uuid bc86897e-85d6-46ea-bf66-2df7c6ed8fa0 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1128.124460] env[62923]: DEBUG nova.compute.manager [req-f3aef187-1732-4a25-b337-89fa89de7adb req-ff869c80-4c8f-4b6d-8e03-75bd8815bd27 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Received event network-changed-9ee90698-8589-4858-8ef7-47e64099ac79 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1128.124619] env[62923]: DEBUG nova.compute.manager [req-f3aef187-1732-4a25-b337-89fa89de7adb req-ff869c80-4c8f-4b6d-8e03-75bd8815bd27 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Refreshing instance network info cache due to event network-changed-9ee90698-8589-4858-8ef7-47e64099ac79. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1128.124947] env[62923]: DEBUG oslo_concurrency.lockutils [req-f3aef187-1732-4a25-b337-89fa89de7adb req-ff869c80-4c8f-4b6d-8e03-75bd8815bd27 service nova] Acquiring lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.125135] env[62923]: DEBUG oslo_concurrency.lockutils [req-f3aef187-1732-4a25-b337-89fa89de7adb req-ff869c80-4c8f-4b6d-8e03-75bd8815bd27 service nova] Acquired lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.125333] env[62923]: DEBUG nova.network.neutron [req-f3aef187-1732-4a25-b337-89fa89de7adb req-ff869c80-4c8f-4b6d-8e03-75bd8815bd27 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Refreshing network info cache for port 9ee90698-8589-4858-8ef7-47e64099ac79 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1128.461356] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370571, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.551111] env[62923]: INFO nova.compute.claims [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1128.614846] env[62923]: DEBUG oslo_concurrency.lockutils [None req-cb9f72af-48b1-40f6-a2c1-8bcf2b34c428 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.254s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.859701] env[62923]: DEBUG nova.network.neutron [req-f3aef187-1732-4a25-b337-89fa89de7adb req-ff869c80-4c8f-4b6d-8e03-75bd8815bd27 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updated VIF entry in instance network info cache for port 9ee90698-8589-4858-8ef7-47e64099ac79. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1128.860077] env[62923]: DEBUG nova.network.neutron [req-f3aef187-1732-4a25-b337-89fa89de7adb req-ff869c80-4c8f-4b6d-8e03-75bd8815bd27 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance_info_cache with network_info: [{"id": "9ee90698-8589-4858-8ef7-47e64099ac79", "address": "fa:16:3e:68:8a:2b", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ee90698-85", "ovs_interfaceid": "9ee90698-8589-4858-8ef7-47e64099ac79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.961402] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370571, 'name': ReconfigVM_Task, 'duration_secs': 0.940675} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.961570] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 222b944d-c58e-476e-b723-fc2b6990120a/222b944d-c58e-476e-b723-fc2b6990120a.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1128.962442] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571a10b1-71f4-4d35-9511-b7ab72e95ae5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.980235] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9641fbae-99eb-493f-b31d-6f451687275a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.997752] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03cf7b7-e59d-4343-b8e6-34b0fe4f300d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.014925] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6657dd9f-bf03-4d5a-b16f-02c2b129ab32 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.021437] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1129.021674] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85fdbeff-52e1-472e-9771-fe8376b0455a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.028062] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1129.028062] env[62923]: value = "task-1370572" [ 1129.028062] env[62923]: _type = "Task" [ 1129.028062] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.036727] env[62923]: DEBUG oslo_vmware.api [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370572, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.058379] env[62923]: INFO nova.compute.resource_tracker [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating resource usage from migration 5f292209-3bb9-49ed-8c52-c1b8dbf590f8 [ 1129.165973] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2300887-a6ce-4592-83ab-fa0d4bcf8284 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.174019] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f58a37-2ba4-4735-9ab7-a9a43a37226a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.205454] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbb3848-8715-4778-ac2b-a42ad391c6f1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.213740] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97834f8d-33c6-4cdf-aac2-458d182b9ad4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.227677] env[62923]: DEBUG nova.compute.provider_tree [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1129.362710] env[62923]: DEBUG oslo_concurrency.lockutils [req-f3aef187-1732-4a25-b337-89fa89de7adb req-ff869c80-4c8f-4b6d-8e03-75bd8815bd27 service nova] Releasing lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1129.456636] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.456965] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.541066] env[62923]: DEBUG oslo_vmware.api [None 
req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370572, 'name': PowerOnVM_Task, 'duration_secs': 0.39954} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.541364] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1129.748655] env[62923]: ERROR nova.scheduler.client.report [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [req-8e8587b3-a0d7-4051-9b10-4cd58312242f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8e8587b3-a0d7-4051-9b10-4cd58312242f"}]} [ 1129.767399] env[62923]: DEBUG nova.scheduler.client.report [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1129.780469] env[62923]: DEBUG nova.scheduler.client.report [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1129.780675] env[62923]: DEBUG nova.compute.provider_tree [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1129.790756] env[62923]: DEBUG nova.scheduler.client.report [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1129.807491] env[62923]: DEBUG nova.scheduler.client.report [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1129.898481] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e27c51a-08f4-4d24-8c49-865fb9f86e15 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.906998] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4bc7ef-f952-4d40-a7e1-11ba575bef8a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.938095] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed640784-29cc-4f0c-b9a4-e325a3b3a1b8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.946126] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b07ec5c-1cb5-4499-9c04-afe4e60c41c6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.960107] env[62923]: DEBUG nova.compute.utils [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1129.961505] env[62923]: DEBUG nova.compute.provider_tree [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1130.464173] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.007s {{(pid=62923) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.492240] env[62923]: DEBUG nova.scheduler.client.report [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 153 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1130.492503] env[62923]: DEBUG nova.compute.provider_tree [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 153 to 154 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1130.492686] env[62923]: DEBUG nova.compute.provider_tree [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1130.552018] env[62923]: INFO nova.compute.manager [None req-a4631cec-aab2-4d40-b334-fcd579cd36c9 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance to original state: 'active' [ 1130.997293] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.951s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.997504] env[62923]: INFO nova.compute.manager [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Migrating [ 1131.514034] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1131.514395] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 
tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.514516] env[62923]: DEBUG nova.network.neutron [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1131.522782] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.523010] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.523244] env[62923]: INFO nova.compute.manager [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Attaching volume 0a9c8437-1886-446b-ade7-2bca0a7cf9e7 to /dev/sdc [ 1131.558599] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bf0bd4-249c-443b-8ada-9e069dbaffed {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.568570] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3989cce-4d00-4328-9e38-cc8b0ea22a81 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.584101] env[62923]: DEBUG nova.virt.block_device [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Updating existing volume attachment record: c3b3cab9-e618-41cd-a3bf-0d97cce035aa {{(pid=62923) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1132.245201] env[62923]: DEBUG nova.network.neutron [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance_info_cache with network_info: [{"id": "9ee90698-8589-4858-8ef7-47e64099ac79", "address": "fa:16:3e:68:8a:2b", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": 
"floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ee90698-85", "ovs_interfaceid": "9ee90698-8589-4858-8ef7-47e64099ac79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.381840] env[62923]: DEBUG oslo_concurrency.lockutils [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "222b944d-c58e-476e-b723-fc2b6990120a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.382156] env[62923]: DEBUG oslo_concurrency.lockutils [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "222b944d-c58e-476e-b723-fc2b6990120a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.382453] env[62923]: DEBUG oslo_concurrency.lockutils [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "222b944d-c58e-476e-b723-fc2b6990120a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.382563] env[62923]: DEBUG oslo_concurrency.lockutils [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "222b944d-c58e-476e-b723-fc2b6990120a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.382728] env[62923]: DEBUG oslo_concurrency.lockutils [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "222b944d-c58e-476e-b723-fc2b6990120a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.384811] env[62923]: INFO nova.compute.manager [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Terminating instance [ 1132.386627] env[62923]: DEBUG nova.compute.manager [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 
tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1132.386851] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1132.387689] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2b7a33-8d47-4808-8c86-6a9da4d0d995 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.396319] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1132.396554] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32843f37-7043-4c0c-928a-66047324d8ff {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.403516] env[62923]: DEBUG oslo_vmware.api [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1132.403516] env[62923]: value = "task-1370574" [ 1132.403516] env[62923]: _type = "Task" [ 1132.403516] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.411980] env[62923]: DEBUG oslo_vmware.api [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370574, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.748769] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.915063] env[62923]: DEBUG oslo_vmware.api [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370574, 'name': PowerOffVM_Task, 'duration_secs': 0.185483} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.915063] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1132.915063] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1132.915063] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b99c0349-ad67-4ec7-ad9c-7d7687685705 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.980270] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1132.980620] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1132.980826] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleting the datastore file [datastore2] 222b944d-c58e-476e-b723-fc2b6990120a {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1132.981123] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-172d1f73-b5ff-4814-acac-8a5176794445 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.990087] env[62923]: DEBUG oslo_vmware.api [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1132.990087] env[62923]: value = "task-1370576" [ 1132.990087] env[62923]: _type = "Task" [ 1132.990087] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.998135] env[62923]: DEBUG oslo_vmware.api [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370576, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.500975] env[62923]: DEBUG oslo_vmware.api [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370576, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149986} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.501239] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1133.501421] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1133.501594] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1133.501768] env[62923]: INFO nova.compute.manager [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1133.502015] env[62923]: DEBUG oslo.service.loopingcall [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1133.502221] env[62923]: DEBUG nova.compute.manager [-] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1133.502314] env[62923]: DEBUG nova.network.neutron [-] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1134.006712] env[62923]: DEBUG nova.compute.manager [req-1bcbcb1d-0e45-4404-87a5-dc3f485af993 req-8e07de27-f9d1-4723-8a88-e17332614480 service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Received event network-vif-deleted-9f0373d6-6c37-4438-8d48-8aa143026856 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1134.007027] env[62923]: INFO nova.compute.manager [req-1bcbcb1d-0e45-4404-87a5-dc3f485af993 req-8e07de27-f9d1-4723-8a88-e17332614480 service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Neutron deleted interface 9f0373d6-6c37-4438-8d48-8aa143026856; detaching it from the instance and deleting it from the info cache [ 1134.007217] env[62923]: DEBUG nova.network.neutron [req-1bcbcb1d-0e45-4404-87a5-dc3f485af993 req-8e07de27-f9d1-4723-8a88-e17332614480 service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.265267] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4fbfe28-b592-4ecd-aac9-4fa1a1eacd66 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.284658] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance 'b943c8e1-68e0-4313-bde7-865ba05408b9' progress to 0 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1134.480128] env[62923]: DEBUG nova.network.neutron [-] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.509634] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e6f68d0c-a057-4537-9433-a6f8c4bc49a9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.519618] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da744437-931e-4aac-aac0-744e0142d26d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.549468] env[62923]: DEBUG nova.compute.manager [req-1bcbcb1d-0e45-4404-87a5-dc3f485af993 req-8e07de27-f9d1-4723-8a88-e17332614480 service nova] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Detach interface failed, port_id=9f0373d6-6c37-4438-8d48-8aa143026856, reason: Instance 222b944d-c58e-476e-b723-fc2b6990120a could not be found. 
{{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1134.790182] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1134.790505] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3bc7d24-ea41-4800-990c-fa87fc992616 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.798315] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1134.798315] env[62923]: value = "task-1370578" [ 1134.798315] env[62923]: _type = "Task" [ 1134.798315] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.807904] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370578, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.982969] env[62923]: INFO nova.compute.manager [-] [instance: 222b944d-c58e-476e-b723-fc2b6990120a] Took 1.48 seconds to deallocate network for instance. [ 1135.310450] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370578, 'name': PowerOffVM_Task, 'duration_secs': 0.169922} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.310737] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1135.310916] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance 'b943c8e1-68e0-4313-bde7-865ba05408b9' progress to 17 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1135.490095] env[62923]: DEBUG oslo_concurrency.lockutils [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.490095] env[62923]: DEBUG oslo_concurrency.lockutils [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.490095] env[62923]: DEBUG oslo_concurrency.lockutils [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1135.511639] env[62923]: INFO nova.scheduler.client.report [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleted allocations for instance 222b944d-c58e-476e-b723-fc2b6990120a [ 1135.817054] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1135.817234] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 1135.817313] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1135.817496] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1135.817643] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1135.817849] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1135.818077] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1135.818246] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1135.818414] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1135.818620] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1135.818808] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1135.824012] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b6ba8b1-e094-4c67-b0b4-e407c7d68ca2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.841593] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 
tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1135.841593] env[62923]: value = "task-1370579" [ 1135.841593] env[62923]: _type = "Task" [ 1135.841593] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.849964] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370579, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.018522] env[62923]: DEBUG oslo_concurrency.lockutils [None req-596fd542-ea33-4fb7-829e-65f835a16ca7 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "222b944d-c58e-476e-b723-fc2b6990120a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 3.636s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.128503] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Volume attach. Driver type: vmdk {{(pid=62923) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1136.128787] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291552', 'volume_id': '0a9c8437-1886-446b-ade7-2bca0a7cf9e7', 'name': 'volume-0a9c8437-1886-446b-ade7-2bca0a7cf9e7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc86897e-85d6-46ea-bf66-2df7c6ed8fa0', 'attached_at': '', 'detached_at': '', 'volume_id': '0a9c8437-1886-446b-ade7-2bca0a7cf9e7', 'serial': '0a9c8437-1886-446b-ade7-2bca0a7cf9e7'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1136.129683] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db86fb02-a200-46b4-a0c6-e9cd93882b2f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.146555] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a97726e-e3c8-4286-84de-b79cd78ee5a3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.173409] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] volume-0a9c8437-1886-446b-ade7-2bca0a7cf9e7/volume-0a9c8437-1886-446b-ade7-2bca0a7cf9e7.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1136.173639] env[62923]: DEBUG oslo_vmware.service [-] Invoking
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6b02515-d91a-4769-b2bb-392c67acd97d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.193345] env[62923]: DEBUG oslo_vmware.api [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1136.193345] env[62923]: value = "task-1370580" [ 1136.193345] env[62923]: _type = "Task" [ 1136.193345] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.201475] env[62923]: DEBUG oslo_vmware.api [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370580, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.352064] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370579, 'name': ReconfigVM_Task, 'duration_secs': 0.158297} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.352475] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance 'b943c8e1-68e0-4313-bde7-865ba05408b9' progress to 33 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1136.705537] env[62923]: DEBUG oslo_vmware.api [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370580, 'name': ReconfigVM_Task, 'duration_secs': 0.37841} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.705839] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Reconfigured VM instance instance-0000006c to attach disk [datastore2] volume-0a9c8437-1886-446b-ade7-2bca0a7cf9e7/volume-0a9c8437-1886-446b-ade7-2bca0a7cf9e7.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1136.710439] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e30da29-7511-4b11-8730-2bc79d13ca7e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.725973] env[62923]: DEBUG oslo_vmware.api [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1136.725973] env[62923]: value = "task-1370581" [ 1136.725973] env[62923]: _type = "Task" [ 1136.725973] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.734056] env[62923]: DEBUG oslo_vmware.api [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370581, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.859595] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1136.859788] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1136.859988] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1136.860215] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1136.860377] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1136.860527] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1136.860782] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1136.860887] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1136.861063] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1136.861247] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1136.861397] env[62923]: DEBUG nova.virt.hardware [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1136.866675] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1136.868093] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05b83764-cf4b-4c6c-8669-76ef973e8ac5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.889951] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1136.889951] env[62923]: value = "task-1370582" [ 1136.889951] env[62923]: _type = "Task" [ 1136.889951] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.901735] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370582, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.236740] env[62923]: DEBUG oslo_vmware.api [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370581, 'name': ReconfigVM_Task, 'duration_secs': 0.14168} completed successfully. 
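The hardware records above capture the whole topology decision for this one-vCPU flavor: preferences and limits are unset (0:0:0, capped at 65536 per dimension), so the only layout whose sockets*cores*threads product covers every vCPU is 1:1:1. A minimal sketch of that enumeration step, not Nova's actual implementation, with the 65536 default caps taken from the "limits were sockets=65536, cores=65536, threads=65536" lines:

from dataclasses import dataclass

@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) whose product is exactly `vcpus`,
    # capped by the per-dimension limits. For vcpus=1 the only answer is
    # 1:1:1, matching the "Got 1 possible topologies" record above.
    topos = []
    for s in range(1, min(max_sockets, vcpus) + 1):
        if vcpus % s:
            continue
        for c in range(1, min(max_cores, vcpus // s) + 1):
            if (vcpus // s) % c:
                continue
            t = vcpus // (s * c)
            if t <= max_threads:
                topos.append(Topology(s, c, t))
    return topos

print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)]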
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.237033] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291552', 'volume_id': '0a9c8437-1886-446b-ade7-2bca0a7cf9e7', 'name': 'volume-0a9c8437-1886-446b-ade7-2bca0a7cf9e7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc86897e-85d6-46ea-bf66-2df7c6ed8fa0', 'attached_at': '', 'detached_at': '', 'volume_id': '0a9c8437-1886-446b-ade7-2bca0a7cf9e7', 'serial': '0a9c8437-1886-446b-ade7-2bca0a7cf9e7'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1137.288243] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "ecbf08d7-e908-4496-8820-b0239bb051b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.288479] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "ecbf08d7-e908-4496-8820-b0239bb051b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.384713] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.385117] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.385214] env[62923]: INFO nova.compute.manager [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Shelving [ 1137.401745] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370582, 'name': ReconfigVM_Task, 'duration_secs': 0.154268} completed successfully.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.402033] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1137.402826] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f357dc2-d8d5-42c9-abe2-590cf7adb2df {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.425607] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] volume-2bdd0528-e981-4226-a215-481044e8f3b2/volume-2bdd0528-e981-4226-a215-481044e8f3b2.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1137.425901] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4da60fd8-9ab4-43d2-b201-6e0c87c4e2b1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.445665] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1137.445665] env[62923]: value = "task-1370583" [ 1137.445665] env[62923]: _type = "Task" [ 1137.445665] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.454229] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370583, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.791213] env[62923]: DEBUG nova.compute.manager [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Starting instance... 
{{(pid=62923) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1137.892523] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1137.892894] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a64af40-f5a3-4ea2-ba8d-b30adf72cb92 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.901380] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1137.901380] env[62923]: value = "task-1370584" [ 1137.901380] env[62923]: _type = "Task" [ 1137.901380] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.912018] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370584, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.956354] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370583, 'name': ReconfigVM_Task, 'duration_secs': 0.481496} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.957146] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Reconfigured VM instance instance-00000070 to attach disk [datastore2] volume-2bdd0528-e981-4226-a215-481044e8f3b2/volume-2bdd0528-e981-4226-a215-481044e8f3b2.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1137.957146] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance 'b943c8e1-68e0-4313-bde7-865ba05408b9' progress to 50 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1138.273046] env[62923]: DEBUG nova.objects.instance [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'flavor' on Instance uuid bc86897e-85d6-46ea-bf66-2df7c6ed8fa0 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.319291] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.319564] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.321140] env[62923]: INFO nova.compute.claims [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1138.411259] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370584, 'name': PowerOffVM_Task, 'duration_secs': 0.326225} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.411556] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1138.412314] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8699f4c5-5c46-4c7c-8651-195bcd3e65a3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.429811] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b89d53-9ed0-412b-89b6-2c71ff198b4c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.464755] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9a6b4e-6ee4-4c15-9f5e-6a331b8f9212 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.485178] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b108d24-41e3-4567-a68d-533e4dbe1c21 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.505203] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance 'b943c8e1-68e0-4313-bde7-865ba05408b9' progress to 67 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1138.778902] env[62923]: DEBUG oslo_concurrency.lockutils [None req-b1bfa75d-fe59-4390-bbba-e32ca67071ac tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.256s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.939619] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Creating Snapshot of the VM instance {{(pid=62923) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1138.939966] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5c91c390-560e-4d0f-b1b3-1e44643935f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.948646] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1138.948646] env[62923]: value = "task-1370585" [ 1138.948646] env[62923]: _type = "Task" [ 1138.948646] env[62923]: } to complete.
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.957788] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370585, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.077722] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.078432] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.250733] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.250934] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Starting heal instance info cache {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1139.438620] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75837e9-e555-4e19-903a-fae52c58a93d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.446754] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d23361-4982-4bfe-8441-5c6122056053 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.458860] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370585, 'name': CreateSnapshot_Task} progress is 100%.
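Every "Waiting for the task ... } to complete." / "progress is N%." pair in this log is oslo.vmware polling a vCenter task object until it reaches a terminal state, as with the CreateSnapshot_Task that just went from 0% to 100%. A rough, self-contained sketch of that polling shape; get_task_info here is a hypothetical stand-in for the real property-collector read, not an oslo.vmware call:

import time

class TaskFailed(Exception):
    pass

def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    # Poll until vCenter reports success or error, reporting progress
    # along the way, the shape behind the repeated
    # "Task: {'id': ..., 'name': ...} progress is N%." records above.
    while True:
        info = get_task_info(task_ref)   # assumed to return an object with
        if info.state == 'success':      # .state, .progress and .error
            return info.result
        if info.state == 'error':
            raise TaskFailed(info.error)
        print(f"Task {task_ref} progress is {info.progress}%.")
        time.sleep(poll_interval)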
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.484270] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e646c9d5-f8b4-40ab-b2c2-19a7dca6b835 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.492548] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d797054e-ea51-4478-bcb3-518d52d04358 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.506940] env[62923]: DEBUG nova.compute.provider_tree [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1139.581200] env[62923]: INFO nova.compute.manager [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Detaching volume 0b89c802-11be-4286-bf70-8fea6df7f9c7 [ 1139.609121] env[62923]: INFO nova.virt.block_device [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Attempting to driver detach volume 0b89c802-11be-4286-bf70-8fea6df7f9c7 from mountpoint /dev/sdb [ 1139.612391] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Volume detach. 
Driver type: vmdk {{(pid=62923) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1139.612391] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291551', 'volume_id': '0b89c802-11be-4286-bf70-8fea6df7f9c7', 'name': 'volume-0b89c802-11be-4286-bf70-8fea6df7f9c7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc86897e-85d6-46ea-bf66-2df7c6ed8fa0', 'attached_at': '', 'detached_at': '', 'volume_id': '0b89c802-11be-4286-bf70-8fea6df7f9c7', 'serial': '0b89c802-11be-4286-bf70-8fea6df7f9c7'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1139.612391] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82db0f3-da69-4d81-8c70-d89f008ff665 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.634843] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f11da15-fcec-441d-82f2-d53010d0885b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.641893] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac22f96-5057-4abf-83b9-a23159156603 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.664353] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad69bd4-b3fa-463d-abbf-9133f9f31212 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.678183] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] The volume has not been displaced from its original location: [datastore2] volume-0b89c802-11be-4286-bf70-8fea6df7f9c7/volume-0b89c802-11be-4286-bf70-8fea6df7f9c7.vmdk. No consolidation needed. 
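The "has not been displaced from its original location ... No consolidation needed" record reflects a cheap path comparison during detach: only if the disk's current backing file no longer matches the vmdk path the volume service originally supplied does the data have to be consolidated back before the disk is removed. A trivial sketch of that decision, with illustrative names:

def needs_consolidation(current_backing_path, original_volume_path):
    # After a storage relocation the attached disk may be backed by a file
    # outside the Cinder-owned location; only then must the data be copied
    # (consolidated) back before ReconfigVM_Task detaches the disk.
    return current_backing_path != original_volume_path

original = ('[datastore2] volume-0b89c802-11be-4286-bf70-8fea6df7f9c7/'
            'volume-0b89c802-11be-4286-bf70-8fea6df7f9c7.vmdk')
assert not needs_consolidation(original, original)  # the case logged above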
{{(pid=62923) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1139.683274] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1139.683778] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cde1228c-41cc-4c5d-9d64-f4aa0fc51fb1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.702073] env[62923]: DEBUG oslo_vmware.api [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1139.702073] env[62923]: value = "task-1370586" [ 1139.702073] env[62923]: _type = "Task" [ 1139.702073] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.711179] env[62923]: DEBUG oslo_vmware.api [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370586, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.959729] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370585, 'name': CreateSnapshot_Task, 'duration_secs': 0.62097} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.960011] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Created Snapshot of the VM instance {{(pid=62923) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1139.960765] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29ec9fe-7f26-4e96-bc57-340661e8c659 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.026289] env[62923]: ERROR nova.scheduler.client.report [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [req-4f3c8f30-a56b-402a-be21-f730567f1242] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a513b783-544c-421b-85ec-cfd6d6ee698d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4f3c8f30-a56b-402a-be21-f730567f1242"}]} [ 1140.041234] env[62923]: DEBUG nova.scheduler.client.report [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Refreshing inventories for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1140.054081] env[62923]: DEBUG nova.scheduler.client.report [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Updating ProviderTree inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1140.054308] env[62923]: DEBUG nova.compute.provider_tree [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 147, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1140.064667] env[62923]: DEBUG nova.scheduler.client.report [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Refreshing aggregate associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, aggregates: None {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1140.082506] env[62923]: DEBUG nova.scheduler.client.report [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Refreshing trait associations for resource provider a513b783-544c-421b-85ec-cfd6d6ee698d, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62923) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1140.145155] env[62923]: DEBUG nova.network.neutron [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Port 9ee90698-8589-4858-8ef7-47e64099ac79 binding to destination host cpu-1 is already ACTIVE {{(pid=62923) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1140.183155] 
env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd09284-3236-48a9-ba9d-0b8aa89efe07 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.191769] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11d3916-14f3-44b3-98f6-bc141a2fbf27 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.225123] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c61d034-3f81-4077-a714-5b14ee9a299f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.233197] env[62923]: DEBUG oslo_vmware.api [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370586, 'name': ReconfigVM_Task, 'duration_secs': 0.264853} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.235159] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1140.239745] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80989e14-3422-4949-bcbe-7f8a913bcd4d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.250078] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a9134f-600d-43f3-85bb-2cfff79c3d98 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.266041] env[62923]: DEBUG nova.compute.provider_tree [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1140.270338] env[62923]: DEBUG oslo_vmware.api [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1140.270338] env[62923]: value = "task-1370587" [ 1140.270338] env[62923]: _type = "Task" [ 1140.270338] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.281379] env[62923]: DEBUG oslo_vmware.api [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370587, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.478294] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Creating linked-clone VM from snapshot {{(pid=62923) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1140.478632] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d143013b-feef-4b13-8e1e-7bf2a4b86c34 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.487206] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1140.487206] env[62923]: value = "task-1370588" [ 1140.487206] env[62923]: _type = "Task" [ 1140.487206] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.495610] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370588, 'name': CloneVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.778302] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Didn't find any instances for network info cache update. {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1140.784687] env[62923]: DEBUG oslo_vmware.api [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370587, 'name': ReconfigVM_Task, 'duration_secs': 0.144968} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.784981] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291551', 'volume_id': '0b89c802-11be-4286-bf70-8fea6df7f9c7', 'name': 'volume-0b89c802-11be-4286-bf70-8fea6df7f9c7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc86897e-85d6-46ea-bf66-2df7c6ed8fa0', 'attached_at': '', 'detached_at': '', 'volume_id': '0b89c802-11be-4286-bf70-8fea6df7f9c7', 'serial': '0b89c802-11be-4286-bf70-8fea6df7f9c7'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1140.803177] env[62923]: DEBUG nova.scheduler.client.report [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Updated inventory for provider a513b783-544c-421b-85ec-cfd6d6ee698d with generation 155 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1140.803638] env[62923]: DEBUG nova.compute.provider_tree [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Updating resource provider a513b783-544c-421b-85ec-cfd6d6ee698d generation from 155 to 156 during operation: update_inventory {{(pid=62923) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1140.803638] env[62923]: DEBUG nova.compute.provider_tree [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Updating inventory in ProviderTree for provider a513b783-544c-421b-85ec-cfd6d6ee698d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1140.998012] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370588, 'name': CloneVM_Task} progress is 94%. 
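The failed inventory PUT above (409, placement.concurrent_update) and the later "generation from 155 to 156" record are the two halves of placement's optimistic-concurrency protocol: every inventory write carries the resource-provider generation the writer last saw, a stale generation is rejected, and the client re-reads and retries. A minimal sketch of that retry loop against the placement REST endpoints, assuming a pre-authenticated requests-style session and eliding error handling:

def set_inventory(session, base_url, provider_uuid, inventories, max_retries=3):
    # Optimistic-concurrency write: send the generation we last read;
    # on 409 (placement.concurrent_update) refresh and try again.
    url = f"{base_url}/resource_providers/{provider_uuid}/inventories"
    for _ in range(max_retries):
        current = session.get(url).json()
        payload = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = session.put(url, json=payload)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()  # includes the bumped generation
    raise RuntimeError("inventory update kept conflicting")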
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.166216] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b943c8e1-68e0-4313-bde7-865ba05408b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.166485] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b943c8e1-68e0-4313-bde7-865ba05408b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.166613] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b943c8e1-68e0-4313-bde7-865ba05408b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.307944] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.988s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.308514] env[62923]: DEBUG nova.compute.manager [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Start building networks asynchronously for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1141.324029] env[62923]: DEBUG nova.objects.instance [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'flavor' on Instance uuid bc86897e-85d6-46ea-bf66-2df7c6ed8fa0 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1141.499239] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370588, 'name': CloneVM_Task} progress is 95%.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.812854] env[62923]: DEBUG nova.compute.utils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1141.814197] env[62923]: DEBUG nova.compute.manager [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Allocating IP information in the background. {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1141.815093] env[62923]: DEBUG nova.network.neutron [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] allocate_for_instance() {{(pid=62923) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1141.861623] env[62923]: DEBUG nova.policy [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '182e1b6f26ed401da24d07a85f993802', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '011a5ec25af44f92961be00f82c10c08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62923) authorize /opt/stack/nova/nova/policy.py:201}} [ 1141.939600] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.939925] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.940157] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.940346] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62923) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1141.999742] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370588, 'name': CloneVM_Task, 'duration_secs': 1.096707} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.000055] env[62923]: INFO nova.virt.vmwareapi.vmops [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Created linked-clone VM from snapshot [ 1142.000824] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee78f420-b601-44fd-9b84-f4c818124098 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.009063] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Uploading image 9e0c20c1-d4ae-4b44-af61-b80ec3136ded {{(pid=62923) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1142.033707] env[62923]: DEBUG oslo_vmware.rw_handles [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1142.033707] env[62923]: value = "vm-291554" [ 1142.033707] env[62923]: _type = "VirtualMachine" [ 1142.033707] env[62923]: }. {{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1142.033972] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-684fdecd-a81f-47f2-ad4a-3121b35b4ec1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.041948] env[62923]: DEBUG oslo_vmware.rw_handles [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lease: (returnval){ [ 1142.041948] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ecde30-f4b3-a579-abb6-b52adfeb89c0" [ 1142.041948] env[62923]: _type = "HttpNfcLease" [ 1142.041948] env[62923]: } obtained for exporting VM: (result){ [ 1142.041948] env[62923]: value = "vm-291554" [ 1142.041948] env[62923]: _type = "VirtualMachine" [ 1142.041948] env[62923]: }. {{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1142.042205] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the lease: (returnval){ [ 1142.042205] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ecde30-f4b3-a579-abb6-b52adfeb89c0" [ 1142.042205] env[62923]: _type = "HttpNfcLease" [ 1142.042205] env[62923]: } to be ready. {{(pid=62923) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1142.048449] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1142.048449] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ecde30-f4b3-a579-abb6-b52adfeb89c0" [ 1142.048449] env[62923]: _type = "HttpNfcLease" [ 1142.048449] env[62923]: } is initializing. 
{{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1142.134598] env[62923]: DEBUG nova.network.neutron [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Successfully created port: 2bb3f758-42d1-4dc8-82ba-849b12e76fa4 {{(pid=62923) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1142.208953] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.209226] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.209441] env[62923]: DEBUG nova.network.neutron [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1142.309233] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.320683] env[62923]: DEBUG nova.compute.manager [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Start building block device mappings for instance. {{(pid=62923) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
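The "refresh_cache-<uuid>" acquire/release pairs above serialize rebuilds of one instance's network info cache without blocking other instances. A sketch of that per-instance named-lock pattern using the public oslo.concurrency API; the two helper callables are hypothetical stand-ins for the real cache plumbing:

    # Sketch: per-instance serialization in the style of the
    # "refresh_cache-<uuid>" locks above.
    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid, fetch_nw_info, save_cache):
        # Same-named locks serialize; refreshes of different instances
        # use different names and so proceed in parallel.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            nw_info = fetch_nw_info(instance_uuid)
            save_cache(instance_uuid, nw_info)
            return nw_info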
[ 1142.331876] env[62923]: DEBUG oslo_concurrency.lockutils [None req-bc344763-ba84-4927-80ee-6e0c6a350bb0 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.254s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.333392] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.024s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.551483] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1142.551483] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ecde30-f4b3-a579-abb6-b52adfeb89c0" [ 1142.551483] env[62923]: _type = "HttpNfcLease" [ 1142.551483] env[62923]: } is ready. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1142.551980] env[62923]: DEBUG oslo_vmware.rw_handles [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1142.551980] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52ecde30-f4b3-a579-abb6-b52adfeb89c0" [ 1142.551980] env[62923]: _type = "HttpNfcLease" [ 1142.551980] env[62923]: }. {{(pid=62923) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1142.552457] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90ccdeb-5c2d-44e9-844b-55784bb20c95 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.559408] env[62923]: DEBUG oslo_vmware.rw_handles [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527d4886-f015-d35f-b197-e5d23114982d/disk-0.vmdk from lease info. {{(pid=62923) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1142.559578] env[62923]: DEBUG oslo_vmware.rw_handles [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527d4886-f015-d35f-b197-e5d23114982d/disk-0.vmdk for reading.
{{(pid=62923) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1142.646137] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4f395154-c893-4c09-a08a-62c347221b57 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.835882] env[62923]: INFO nova.compute.manager [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Detaching volume 0a9c8437-1886-446b-ade7-2bca0a7cf9e7 [ 1142.867521] env[62923]: INFO nova.virt.block_device [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Attempting to driver detach volume 0a9c8437-1886-446b-ade7-2bca0a7cf9e7 from mountpoint /dev/sdc [ 1142.867810] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Volume detach. Driver type: vmdk {{(pid=62923) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1142.868880] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291552', 'volume_id': '0a9c8437-1886-446b-ade7-2bca0a7cf9e7', 'name': 'volume-0a9c8437-1886-446b-ade7-2bca0a7cf9e7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc86897e-85d6-46ea-bf66-2df7c6ed8fa0', 'attached_at': '', 'detached_at': '', 'volume_id': '0a9c8437-1886-446b-ade7-2bca0a7cf9e7', 'serial': '0a9c8437-1886-446b-ade7-2bca0a7cf9e7'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1142.869206] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df595ff2-b942-4a32-8d63-8f316c04599e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.893724] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239086ff-6e07-4025-a83d-bb1792a9680c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.901658] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64755f70-e2f7-4cab-b69b-5e10dc4bb106 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.925088] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfaa1821-d1aa-4eeb-b2f2-f6f2c7dcdea1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.929988] env[62923]: DEBUG nova.network.neutron [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] 
[instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance_info_cache with network_info: [{"id": "9ee90698-8589-4858-8ef7-47e64099ac79", "address": "fa:16:3e:68:8a:2b", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ee90698-85", "ovs_interfaceid": "9ee90698-8589-4858-8ef7-47e64099ac79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.941715] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] The volume has not been displaced from its original location: [datastore2] volume-0a9c8437-1886-446b-ade7-2bca0a7cf9e7/volume-0a9c8437-1886-446b-ade7-2bca0a7cf9e7.vmdk. No consolidation needed. {{(pid=62923) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1142.947588] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Reconfiguring VM instance instance-0000006c to detach disk 2002 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1142.949198] env[62923]: DEBUG oslo_concurrency.lockutils [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.952619] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-063cbd13-e253-4fef-b3bd-8e1133de2590 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.974162] env[62923]: DEBUG oslo_vmware.api [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1142.974162] env[62923]: value = "task-1370590" [ 1142.974162] env[62923]: _type = "Task" [ 1142.974162] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.983640] env[62923]: DEBUG oslo_vmware.api [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370590, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.331370] env[62923]: DEBUG nova.compute.manager [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Start spawning the instance on the hypervisor. {{(pid=62923) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1143.353885] env[62923]: DEBUG nova.virt.hardware [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-29T11:59:43Z,direct_url=,disk_format='vmdk',id=cd84cf13-77b9-4bc1-bb15-31bece605a8e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='1c585f1c158c4b85b6a70271870de82d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-29T11:59:43Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1143.354456] env[62923]: DEBUG nova.virt.hardware [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1143.354738] env[62923]: DEBUG nova.virt.hardware [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1143.355091] env[62923]: DEBUG nova.virt.hardware [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1143.355396] env[62923]: DEBUG nova.virt.hardware [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1143.355663] env[62923]: DEBUG nova.virt.hardware [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1143.356610] env[62923]: DEBUG 
nova.virt.hardware [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1143.356610] env[62923]: DEBUG nova.virt.hardware [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1143.356610] env[62923]: DEBUG nova.virt.hardware [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1143.356887] env[62923]: DEBUG nova.virt.hardware [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1143.358024] env[62923]: DEBUG nova.virt.hardware [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1143.358279] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807d6a33-dcc2-4ab0-869d-24a4cddcb818 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.367767] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32d10f4-5587-413f-8b60-d99fc5afa32f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.471663] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63142e9-ce61-4a66-8a1a-f43564202ced {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.485550] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d51f4c-00ea-4d27-8fe1-cb266107c266 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.488486] env[62923]: DEBUG oslo_vmware.api [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370590, 'name': ReconfigVM_Task, 'duration_secs': 0.27908} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.488887] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Reconfigured VM instance instance-0000006c to detach disk 2002 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1143.494291] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f150fac-425c-47a5-9340-d0324cfcda13 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.516259] env[62923]: DEBUG oslo_vmware.api [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1143.516259] env[62923]: value = "task-1370591" [ 1143.516259] env[62923]: _type = "Task" [ 1143.516259] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.524870] env[62923]: DEBUG oslo_vmware.api [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370591, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.671201] env[62923]: DEBUG nova.compute.manager [req-9972b039-e9fa-48bf-ad66-4591e34a6038 req-bc0b1fa1-7bb4-4bed-84a3-cf36b23ce44a service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Received event network-vif-plugged-2bb3f758-42d1-4dc8-82ba-849b12e76fa4 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1143.671847] env[62923]: DEBUG oslo_concurrency.lockutils [req-9972b039-e9fa-48bf-ad66-4591e34a6038 req-bc0b1fa1-7bb4-4bed-84a3-cf36b23ce44a service nova] Acquiring lock "ecbf08d7-e908-4496-8820-b0239bb051b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.673257] env[62923]: DEBUG oslo_concurrency.lockutils [req-9972b039-e9fa-48bf-ad66-4591e34a6038 req-bc0b1fa1-7bb4-4bed-84a3-cf36b23ce44a service nova] Lock "ecbf08d7-e908-4496-8820-b0239bb051b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.673257] env[62923]: DEBUG oslo_concurrency.lockutils [req-9972b039-e9fa-48bf-ad66-4591e34a6038 req-bc0b1fa1-7bb4-4bed-84a3-cf36b23ce44a service nova] Lock "ecbf08d7-e908-4496-8820-b0239bb051b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.673257] env[62923]: DEBUG nova.compute.manager [req-9972b039-e9fa-48bf-ad66-4591e34a6038 req-bc0b1fa1-7bb4-4bed-84a3-cf36b23ce44a service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] No waiting events found dispatching network-vif-plugged-2bb3f758-42d1-4dc8-82ba-849b12e76fa4 {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
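The records just above and the WARNING that follows show Nova's external-event handshake: neutron reports network-vif-plugged for port 2bb3f758, the manager finds no registered waiter ("No waiting events found dispatching"), and the event is then logged as unexpected, which is harmless during spawn because the port can come up before a waiter is registered. A reduced sketch of that dispatch logic using plain threading primitives (a simplification, not Nova's actual implementation):

    # Sketch: expected-vs-unexpected external event dispatch, reduced to
    # a dict of threading.Event waiters keyed by (event_name, tag).
    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (event_name, tag) -> threading.Event

        def prepare(self, key):
            # Called *before* starting the operation that triggers the event.
            with self._lock:
                return self._waiters.setdefault(key, threading.Event())

        def dispatch(self, key):
            with self._lock:
                waiter = self._waiters.pop(key, None)
            if waiter is None:
                print('WARNING: received unexpected event %r' % (key,))
            else:
                waiter.set()  # wakes whoever blocked on waiter.wait()

    events = InstanceEvents()
    # No prepare() ran first, so this mirrors the WARNING in the log:
    events.dispatch(('network-vif-plugged', '2bb3f758'))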
[ 1143.673257] env[62923]: WARNING nova.compute.manager [req-9972b039-e9fa-48bf-ad66-4591e34a6038 req-bc0b1fa1-7bb4-4bed-84a3-cf36b23ce44a service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Received unexpected event network-vif-plugged-2bb3f758-42d1-4dc8-82ba-849b12e76fa4 for instance with vm_state building and task_state spawning. [ 1143.682031] env[62923]: DEBUG nova.network.neutron [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Successfully updated port: 2bb3f758-42d1-4dc8-82ba-849b12e76fa4 {{(pid=62923) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1143.936400] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1143.940328] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1143.940673] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1143.940977] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1144.026490] env[62923]: DEBUG oslo_vmware.api [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370591, 'name': ReconfigVM_Task} progress is 14%.
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.184490] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.184790] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.184885] env[62923]: DEBUG nova.network.neutron [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1144.444335] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.444578] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.444757] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.444929] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62923) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1144.445896] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80f55fe-463f-420a-a72a-e14063ecb51b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.455078] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f44f84-d374-4861-91c7-ff5bf6cabd4c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.469561] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7508b0-7b1a-4c25-b42c-b30979dfe209 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.476304] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-900a0123-38e8-46ef-aa8c-835bdf733232 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.506394] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180845MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62923) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1144.506607] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.506854] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.526200] env[62923]: DEBUG oslo_vmware.api [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370591, 'name': ReconfigVM_Task, 'duration_secs': 0.803244} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.526537] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291552', 'volume_id': '0a9c8437-1886-446b-ade7-2bca0a7cf9e7', 'name': 'volume-0a9c8437-1886-446b-ade7-2bca0a7cf9e7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc86897e-85d6-46ea-bf66-2df7c6ed8fa0', 'attached_at': '', 'detached_at': '', 'volume_id': '0a9c8437-1886-446b-ade7-2bca0a7cf9e7', 'serial': '0a9c8437-1886-446b-ade7-2bca0a7cf9e7'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1144.601471] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe71cccf-d3e1-460b-9c38-21d8f3a262a2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.622101] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026e5846-fca4-420e-90ff-ef9c69721802 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.629140] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance 'b943c8e1-68e0-4313-bde7-865ba05408b9' progress to 83 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1144.718237] env[62923]: DEBUG nova.network.neutron [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 
tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Instance cache missing network info. {{(pid=62923) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1144.860868] env[62923]: DEBUG nova.network.neutron [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Updating instance_info_cache with network_info: [{"id": "2bb3f758-42d1-4dc8-82ba-849b12e76fa4", "address": "fa:16:3e:0b:bc:61", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb3f758-42", "ovs_interfaceid": "2bb3f758-42d1-4dc8-82ba-849b12e76fa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.960977] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.960977] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.071716] env[62923]: DEBUG nova.objects.instance [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'flavor' on Instance uuid bc86897e-85d6-46ea-bf66-2df7c6ed8fa0 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.135758] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
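The PowerOnVM_Task invocation that follows goes through oslo.vmware's session layer, which submits the vSphere task and then polls it until it reaches a terminal state; the recurring "progress is N%" lines in this trace come from that poll loop. A hedged sketch against the public oslo.vmware API, assuming a reachable vCenter; the endpoint, credentials and VM lookup are placeholders:

    # Sketch: driving a vSphere task the way the session layer does for
    # the PowerOnVM_Task below. get_vm_ref() is a hypothetical helper
    # returning the VM's ManagedObjectReference.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=2, task_poll_interval=0.5)

    vm_ref = get_vm_ref(session)
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Blocks, polling the task and logging its progress, until the task
    # succeeds (returns the task info) or fails (raises).
    task_info = session.wait_for_task(task)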
[ 1145.136032] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5558bc7-f1a5-437c-99b7-1ead79c948f2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.143459] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1145.143459] env[62923]: value = "task-1370592" [ 1145.143459] env[62923]: _type = "Task" [ 1145.143459] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.151716] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370592, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.363563] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.363904] env[62923]: DEBUG nova.compute.manager [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Instance network_info: |[{"id": "2bb3f758-42d1-4dc8-82ba-849b12e76fa4", "address": "fa:16:3e:0b:bc:61", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb3f758-42", "ovs_interfaceid": "2bb3f758-42d1-4dc8-82ba-849b12e76fa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62923) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1145.364374] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:bc:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4c7a041-8e34-47f9-8ea1-d2f29414fd9d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2bb3f758-42d1-4dc8-82ba-849b12e76fa4', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1145.371888] env[62923]: DEBUG oslo.service.loopingcall [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1145.372138] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1145.372363] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99b16b6f-198d-4b12-a449-07b026c6ee18 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.393170] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1145.393170] env[62923]: value = "task-1370593" [ 1145.393170] env[62923]: _type = "Task" [ 1145.393170] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.404547] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370593, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.463890] env[62923]: DEBUG nova.compute.utils [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Using /dev/sd instead of None {{(pid=62923) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1145.515672] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Applying migration context for instance b943c8e1-68e0-4313-bde7-865ba05408b9 as it has an incoming, in-progress migration 5f292209-3bb9-49ed-8c52-c1b8dbf590f8. Migration status is post-migrating {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1145.516862] env[62923]: INFO nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating resource usage from migration 5f292209-3bb9-49ed-8c52-c1b8dbf590f8 [ 1145.533291] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance 534fa654-ed73-4518-bdc7-d1f981628fd8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.533435] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance bc86897e-85d6-46ea-bf66-2df7c6ed8fa0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.533553] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance b826c4d1-3e31-49da-8e16-8e512599912c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.533665] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance d4bc3c6c-20ac-4714-8109-867a2f6292b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.533776] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance f0b447f4-7d0d-4a91-9e99-bf8fad24b750 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.533886] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Migration 5f292209-3bb9-49ed-8c52-c1b8dbf590f8 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1145.533996] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance b943c8e1-68e0-4313-bde7-865ba05408b9 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.534122] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance ecbf08d7-e908-4496-8820-b0239bb051b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.534314] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1145.534444] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1145.638174] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abcad891-9f28-47ef-9e60-e1758d97eaab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.648222] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e7b197-ebaa-4a45-ad90-f43b20843929 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.655329] env[62923]: DEBUG oslo_vmware.api [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370592, 'name': PowerOnVM_Task, 'duration_secs': 0.448017} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.679747] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1145.679987] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-19ebe8ec-ee79-41e1-9a61-f632ecc36299 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance 'b943c8e1-68e0-4313-bde7-865ba05408b9' progress to 100 {{(pid=62923) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1145.684625] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aad611a-6a94-481e-b03b-8dede0dc17de {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.691769] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced58718-f46b-40f9-af4e-38a67b91a391 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.697476] env[62923]: DEBUG nova.compute.manager [req-d5bb07ce-cd10-4bee-bf8d-363f55a2d5c1 req-339cf112-b3fd-400d-b0e9-4f3d11e9abe6 service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Received event network-changed-2bb3f758-42d1-4dc8-82ba-849b12e76fa4 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1145.697666] env[62923]: DEBUG nova.compute.manager [req-d5bb07ce-cd10-4bee-bf8d-363f55a2d5c1 req-339cf112-b3fd-400d-b0e9-4f3d11e9abe6 service nova] 
[instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Refreshing instance network info cache due to event network-changed-2bb3f758-42d1-4dc8-82ba-849b12e76fa4. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1145.697878] env[62923]: DEBUG oslo_concurrency.lockutils [req-d5bb07ce-cd10-4bee-bf8d-363f55a2d5c1 req-339cf112-b3fd-400d-b0e9-4f3d11e9abe6 service nova] Acquiring lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.698037] env[62923]: DEBUG oslo_concurrency.lockutils [req-d5bb07ce-cd10-4bee-bf8d-363f55a2d5c1 req-339cf112-b3fd-400d-b0e9-4f3d11e9abe6 service nova] Acquired lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.698284] env[62923]: DEBUG nova.network.neutron [req-d5bb07ce-cd10-4bee-bf8d-363f55a2d5c1 req-339cf112-b3fd-400d-b0e9-4f3d11e9abe6 service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Refreshing network info cache for port 2bb3f758-42d1-4dc8-82ba-849b12e76fa4 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1145.710982] env[62923]: DEBUG nova.compute.provider_tree [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.903182] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370593, 'name': CreateVM_Task, 'duration_secs': 0.404151} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.903473] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1145.903986] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.904173] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.904493] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1145.904749] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8168e2bb-45a0-474a-a643-2cd56fcefdfd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.909132] env[62923]: 
DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1145.909132] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5293a5c1-a2b1-f2c4-1ab3-26e4ae93e697" [ 1145.909132] env[62923]: _type = "Task" [ 1145.909132] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.916465] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5293a5c1-a2b1-f2c4-1ab3-26e4ae93e697, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.966437] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.005s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.078726] env[62923]: DEBUG oslo_concurrency.lockutils [None req-62051c32-0ec6-4af3-9125-0609587cec6f tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.746s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.213692] env[62923]: DEBUG nova.scheduler.client.report [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1146.408758] env[62923]: DEBUG nova.network.neutron [req-d5bb07ce-cd10-4bee-bf8d-363f55a2d5c1 req-339cf112-b3fd-400d-b0e9-4f3d11e9abe6 service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Updated VIF entry in instance network info cache for port 2bb3f758-42d1-4dc8-82ba-849b12e76fa4. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
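The "Inventory has not changed" record a few lines above is what bounds scheduling on this node: as I understand Placement's model, usable capacity per resource class is (total - reserved) * allocation_ratio. Checking that formula against the reported figures:

    # Sketch: schedulable capacity implied by the inventory above,
    # assuming capacity = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in sorted(inventory.items()):
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: %d' % (rc, capacity))
    # DISK_GB: 400, MEMORY_MB: 196078, VCPU: 192

The 192-vCPU ceiling is consistent with the resource audit earlier in this section, which reports 8 of 48 physical vCPUs allocated.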
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1146.409366] env[62923]: DEBUG nova.network.neutron [req-d5bb07ce-cd10-4bee-bf8d-363f55a2d5c1 req-339cf112-b3fd-400d-b0e9-4f3d11e9abe6 service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Updating instance_info_cache with network_info: [{"id": "2bb3f758-42d1-4dc8-82ba-849b12e76fa4", "address": "fa:16:3e:0b:bc:61", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb3f758-42", "ovs_interfaceid": "2bb3f758-42d1-4dc8-82ba-849b12e76fa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.164176] env[62923]: DEBUG oslo_concurrency.lockutils [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.164535] env[62923]: DEBUG oslo_concurrency.lockutils [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.164625] env[62923]: DEBUG oslo_concurrency.lockutils [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.164815] env[62923]: DEBUG oslo_concurrency.lockutils [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.165040] env[62923]: DEBUG oslo_concurrency.lockutils [None req-52922fd4-3a96-444e-bc81-36fad2181d66 
tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.166855] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1147.167072] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.660s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.170685] env[62923]: DEBUG oslo_concurrency.lockutils [req-d5bb07ce-cd10-4bee-bf8d-363f55a2d5c1 req-339cf112-b3fd-400d-b0e9-4f3d11e9abe6 service nova] Releasing lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1147.171061] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5293a5c1-a2b1-f2c4-1ab3-26e4ae93e697, 'name': SearchDatastore_Task, 'duration_secs': 0.015359} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.171586] env[62923]: INFO nova.compute.manager [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Terminating instance [ 1147.175240] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1147.175482] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Processing image cd84cf13-77b9-4bc1-bb15-31bece605a8e {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1147.175704] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1147.175848] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock 
"[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.176033] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1147.176606] env[62923]: DEBUG nova.compute.manager [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1147.176788] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1147.177185] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee5c9a5e-5d52-4854-b22f-d6afc74efee7 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.180286] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff7137f-a886-457d-804a-a8e0dcb2fab2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.187790] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1147.188072] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef64c3e9-d240-4174-8f5d-c417be2d1fff {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.195267] env[62923]: DEBUG oslo_vmware.api [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1147.195267] env[62923]: value = "task-1370594" [ 1147.195267] env[62923]: _type = "Task" [ 1147.195267] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.200012] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1147.200239] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1147.201339] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5512cfd1-1b64-44c1-90e8-677f51d126c1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.206645] env[62923]: DEBUG oslo_vmware.api [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370594, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.209860] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1147.209860] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5299c0a4-2805-d8b7-0264-e0d3d896a5c4" [ 1147.209860] env[62923]: _type = "Task" [ 1147.209860] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.218525] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5299c0a4-2805-d8b7-0264-e0d3d896a5c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.678087] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.678393] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1147.678574] env[62923]: INFO nova.compute.manager [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Attaching volume 679d9384-ce44-4357-91e8-4f856866cff6 to /dev/sdb [ 1147.706106] env[62923]: DEBUG oslo_vmware.api [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370594, 'name': PowerOffVM_Task, 'duration_secs': 0.194501} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.706106] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1147.706106] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1147.706363] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d01887e3-e73a-42ed-bb67-c7bf5a8c40d5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.715027] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5cb0a4b-9840-41ea-b3ed-15d762d0141a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.722839] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]5299c0a4-2805-d8b7-0264-e0d3d896a5c4, 'name': SearchDatastore_Task, 'duration_secs': 0.017316} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.725044] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-645ab3ab-78c8-4d35-ab2e-722ec3292bd2 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.727512] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673711dc-d458-49f4-81c8-6cb800aa0266 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.733160] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1147.733160] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]523b5fcb-9139-3eda-8ebf-ae008bebc24a" [ 1147.733160] env[62923]: _type = "Task" [ 1147.733160] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.741180] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]523b5fcb-9139-3eda-8ebf-ae008bebc24a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.744624] env[62923]: DEBUG nova.virt.block_device [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Updating existing volume attachment record: 675f7d25-993c-4251-a990-eaca05a74b95 {{(pid=62923) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1147.772113] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1147.772339] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1147.772522] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Deleting the datastore file [datastore2] bc86897e-85d6-46ea-bf66-2df7c6ed8fa0 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1147.772799] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3d354df-ba74-4201-9b4a-9e009c45e0fe {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.779995] env[62923]: DEBUG oslo_vmware.api [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for the task: (returnval){ [ 1147.779995] env[62923]: value = "task-1370596" [ 1147.779995] env[62923]: _type = "Task" [ 1147.779995] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.787342] env[62923]: DEBUG oslo_vmware.api [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370596, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.173891] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.243221] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]523b5fcb-9139-3eda-8ebf-ae008bebc24a, 'name': SearchDatastore_Task, 'duration_secs': 0.010603} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.243497] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1148.243756] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] ecbf08d7-e908-4496-8820-b0239bb051b1/ecbf08d7-e908-4496-8820-b0239bb051b1.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1148.244025] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3091917b-22e3-4929-bf69-2a99cd20c19d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.250949] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1148.250949] env[62923]: value = "task-1370598" [ 1148.250949] env[62923]: _type = "Task" [ 1148.250949] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.259888] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370598, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.288843] env[62923]: DEBUG oslo_vmware.api [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Task: {'id': task-1370596, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157135} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.289214] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1148.289346] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1148.289489] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1148.289663] env[62923]: INFO nova.compute.manager [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1148.289909] env[62923]: DEBUG oslo.service.loopingcall [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1148.290119] env[62923]: DEBUG nova.compute.manager [-] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1148.290207] env[62923]: DEBUG nova.network.neutron [-] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1148.613149] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b943c8e1-68e0-4313-bde7-865ba05408b9" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.613149] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b943c8e1-68e0-4313-bde7-865ba05408b9" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1148.613149] env[62923]: DEBUG nova.compute.manager [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Going to confirm migration 6 {{(pid=62923) do_confirm_resize 
/opt/stack/nova/nova/compute/manager.py:4783}} [ 1148.763410] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370598, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.091089] env[62923]: DEBUG nova.compute.manager [req-2a825d31-795a-4de6-a452-f38b98c9b090 req-c1751b28-0903-4191-b2a4-8c6f389a325a service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Received event network-vif-deleted-dea689f8-5e91-490e-980b-8025533b5e90 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1149.091204] env[62923]: INFO nova.compute.manager [req-2a825d31-795a-4de6-a452-f38b98c9b090 req-c1751b28-0903-4191-b2a4-8c6f389a325a service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Neutron deleted interface dea689f8-5e91-490e-980b-8025533b5e90; detaching it from the instance and deleting it from the info cache [ 1149.094620] env[62923]: DEBUG nova.network.neutron [req-2a825d31-795a-4de6-a452-f38b98c9b090 req-c1751b28-0903-4191-b2a4-8c6f389a325a service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.177345] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.177668] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquired lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.177758] env[62923]: DEBUG nova.network.neutron [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1149.177952] env[62923]: DEBUG nova.objects.instance [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lazy-loading 'info_cache' on Instance uuid b943c8e1-68e0-4313-bde7-865ba05408b9 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1149.264939] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370598, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.74475} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.265158] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cd84cf13-77b9-4bc1-bb15-31bece605a8e/cd84cf13-77b9-4bc1-bb15-31bece605a8e.vmdk to [datastore2] ecbf08d7-e908-4496-8820-b0239bb051b1/ecbf08d7-e908-4496-8820-b0239bb051b1.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1149.265384] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Extending root virtual disk to 1048576 {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1149.265658] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-954cfa9f-252b-4681-a0e8-f87149e247be {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.271936] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1149.271936] env[62923]: value = "task-1370599" [ 1149.271936] env[62923]: _type = "Task" [ 1149.271936] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.279399] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370599, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.568120] env[62923]: DEBUG nova.network.neutron [-] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.597215] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5042b9e0-8c75-4e30-ad95-80fa4f851846 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.606493] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef5940d-0706-485d-b1fd-bd3266dbf3db {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.635365] env[62923]: DEBUG nova.compute.manager [req-2a825d31-795a-4de6-a452-f38b98c9b090 req-c1751b28-0903-4191-b2a4-8c6f389a325a service nova] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Detach interface failed, port_id=dea689f8-5e91-490e-980b-8025533b5e90, reason: Instance bc86897e-85d6-46ea-bf66-2df7c6ed8fa0 could not be found. 
{{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1149.781791] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370599, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.194655} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.782100] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Extended root virtual disk {{(pid=62923) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1149.782844] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a76da4b-e080-4529-9386-1d618a131b66 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.803649] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] ecbf08d7-e908-4496-8820-b0239bb051b1/ecbf08d7-e908-4496-8820-b0239bb051b1.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1149.803898] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3737b35-70c9-454c-a6a1-2ebdef937fab {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.823508] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1149.823508] env[62923]: value = "task-1370600" [ 1149.823508] env[62923]: _type = "Task" [ 1149.823508] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.830946] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370600, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.071308] env[62923]: INFO nova.compute.manager [-] [instance: bc86897e-85d6-46ea-bf66-2df7c6ed8fa0] Took 1.78 seconds to deallocate network for instance. [ 1150.333163] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370600, 'name': ReconfigVM_Task, 'duration_secs': 0.354399} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.333481] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Reconfigured VM instance instance-00000071 to attach disk [datastore2] ecbf08d7-e908-4496-8820-b0239bb051b1/ecbf08d7-e908-4496-8820-b0239bb051b1.vmdk or device None with type sparse {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1150.334085] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cce0c8a6-b346-4325-bf27-dabc7860caa5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.341601] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1150.341601] env[62923]: value = "task-1370602" [ 1150.341601] env[62923]: _type = "Task" [ 1150.341601] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.359077] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370602, 'name': Rename_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.410376] env[62923]: DEBUG nova.network.neutron [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance_info_cache with network_info: [{"id": "9ee90698-8589-4858-8ef7-47e64099ac79", "address": "fa:16:3e:68:8a:2b", "network": {"id": "153666d4-e4e6-46d6-ab59-bb3bed798a1c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-244671858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d1cf5e642524949a8366bf54d00593e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e41070eb-3ac1-4ca9-a3d0-fd65893a97de", "external-id": "nsx-vlan-transportzone-596", "segmentation_id": 596, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ee90698-85", "ovs_interfaceid": "9ee90698-8589-4858-8ef7-47e64099ac79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.578516] env[62923]: DEBUG oslo_concurrency.lockutils [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 
tempest-AttachVolumeTestJSON-1511203818-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.578810] env[62923]: DEBUG oslo_concurrency.lockutils [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.579082] env[62923]: DEBUG nova.objects.instance [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lazy-loading 'resources' on Instance uuid bc86897e-85d6-46ea-bf66-2df7c6ed8fa0 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1150.851938] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370602, 'name': Rename_Task, 'duration_secs': 0.138805} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.852305] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1150.852591] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ef28b29-f726-431c-817f-f2b36e47eb3b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.858664] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1150.858664] env[62923]: value = "task-1370603" [ 1150.858664] env[62923]: _type = "Task" [ 1150.858664] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.866798] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370603, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.913707] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Releasing lock "refresh_cache-b943c8e1-68e0-4313-bde7-865ba05408b9" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1150.913978] env[62923]: DEBUG nova.objects.instance [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lazy-loading 'migration_context' on Instance uuid b943c8e1-68e0-4313-bde7-865ba05408b9 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1151.199722] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f2772f-f268-49b0-8865-4feecfb95552 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.207837] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e25262-4472-41fc-821b-ac896f47775a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.238254] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814635f2-7ecc-418d-acc6-32940a31133a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.245916] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348a65ce-ea48-41cb-8e23-8fcb0270a5ea {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.259207] env[62923]: DEBUG nova.compute.provider_tree [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1151.368338] env[62923]: DEBUG oslo_vmware.api [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370603, 'name': PowerOnVM_Task, 'duration_secs': 0.468414} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.368738] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1151.368858] env[62923]: INFO nova.compute.manager [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Took 8.04 seconds to spawn the instance on the hypervisor. 
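[annotation] The PowerOnVM_Task exchange above follows oslo.vmware's standard task pattern: invoke_api() starts the asynchronous vSphere task, then wait_for_task() polls it (the recurring "progress is N%" lines from _poll_task) until it completes or faults. A minimal sketch of that pattern, assuming placeholder connection details and a hypothetical 'vm-12345' moref rather than anything taken from this log:

    # Sketch of the oslo.vmware invoke/wait pattern visible in the log.
    # Host, credentials, and the 'vm-12345' moref are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.org',                # vCenter endpoint (placeholder)
        'administrator@vsphere.local',   # placeholder credentials
        'secret',
        api_retry_count=10,              # retry transient API faults
        task_poll_interval=0.5)          # seconds between progress polls

    # Build a managed-object reference for the VM to power on.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start the asynchronous vSphere task, then block until it finishes;
    # wait_for_task() raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
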
[ 1151.369144] env[62923]: DEBUG nova.compute.manager [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1151.369930] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68cd5cc-06fc-4abb-a112-2409d7c21187 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.417478] env[62923]: DEBUG nova.objects.base [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1151.418459] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595ca470-5561-4843-8a2b-616a0ccaf3a6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.439585] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-349b086a-6a4c-4813-a1a3-7c1b6f32208a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.444958] env[62923]: DEBUG oslo_vmware.api [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1151.444958] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c9eb26-64df-173a-89b6-98c819f082eb" [ 1151.444958] env[62923]: _type = "Task" [ 1151.444958] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.453011] env[62923]: DEBUG oslo_vmware.api [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c9eb26-64df-173a-89b6-98c819f082eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.762578] env[62923]: DEBUG nova.scheduler.client.report [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1151.885867] env[62923]: INFO nova.compute.manager [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Took 13.58 seconds to build instance. 
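[annotation] The "Acquiring lock / acquired :: waited / released :: held N.NNNs" triplets throughout this section are emitted by oslo.concurrency's lockutils helpers: the 'inner' frames at lockutils.py:402/407/421 belong to its synchronized decorator, while the 'lock' frames at :310/:313/:331 belong to its lock() context manager. A rough sketch of both forms, with illustrative lock names and function bodies only:

    # Sketch of the oslo.concurrency locking pattern behind the
    # acquire/release DEBUG lines in this log. Names are illustrative.
    from oslo_concurrency import lockutils

    # Decorator form: callers serialize on the named in-process lock,
    # and acquire/release are logged with wait and hold durations.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # resource-tracker-style critical section (illustrative)

    # Context-manager form, as used for the per-instance cache locks
    # such as "refresh_cache-<instance-uuid>" above.
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass  # refresh the instance network info cache (illustrative)
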
[ 1151.955612] env[62923]: DEBUG oslo_vmware.api [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52c9eb26-64df-173a-89b6-98c819f082eb, 'name': SearchDatastore_Task, 'duration_secs': 0.009946} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.955898] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.267824] env[62923]: DEBUG oslo_concurrency.lockutils [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.689s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.270708] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.315s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.296023] env[62923]: INFO nova.scheduler.client.report [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Deleted allocations for instance bc86897e-85d6-46ea-bf66-2df7c6ed8fa0 [ 1152.298343] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Volume attach. 
Driver type: vmdk {{(pid=62923) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1152.298692] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291556', 'volume_id': '679d9384-ce44-4357-91e8-4f856866cff6', 'name': 'volume-679d9384-ce44-4357-91e8-4f856866cff6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f0b447f4-7d0d-4a91-9e99-bf8fad24b750', 'attached_at': '', 'detached_at': '', 'volume_id': '679d9384-ce44-4357-91e8-4f856866cff6', 'serial': '679d9384-ce44-4357-91e8-4f856866cff6'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1152.299874] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e540f92-4407-4d5c-900a-bab4ee11707a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.321725] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2d03fd-f147-42af-ab96-0050071a657d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.347975] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] volume-679d9384-ce44-4357-91e8-4f856866cff6/volume-679d9384-ce44-4357-91e8-4f856866cff6.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1152.348368] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4146fbd-0625-4a9d-8fdd-0b112825ddb6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.367414] env[62923]: DEBUG oslo_vmware.api [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1152.367414] env[62923]: value = "task-1370604" [ 1152.367414] env[62923]: _type = "Task" [ 1152.367414] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.375720] env[62923]: DEBUG oslo_vmware.api [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370604, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.387701] env[62923]: DEBUG oslo_concurrency.lockutils [None req-a5523570-82f2-4a1d-9f7f-401fff4f5b44 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "ecbf08d7-e908-4496-8820-b0239bb051b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.099s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.729888] env[62923]: DEBUG oslo_vmware.rw_handles [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527d4886-f015-d35f-b197-e5d23114982d/disk-0.vmdk. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1152.730825] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5e2730-6de6-46da-b7f1-4530ec90d8a9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.737350] env[62923]: DEBUG oslo_vmware.rw_handles [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527d4886-f015-d35f-b197-e5d23114982d/disk-0.vmdk is in state: ready. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1152.737530] env[62923]: ERROR oslo_vmware.rw_handles [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527d4886-f015-d35f-b197-e5d23114982d/disk-0.vmdk due to incomplete transfer. [ 1152.737761] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9ca4798a-fc19-4768-88c2-419f58e7388e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.744402] env[62923]: DEBUG oslo_vmware.rw_handles [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527d4886-f015-d35f-b197-e5d23114982d/disk-0.vmdk. 
{{(pid=62923) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1152.744615] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Uploaded image 9e0c20c1-d4ae-4b44-af61-b80ec3136ded to the Glance image server {{(pid=62923) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1152.747374] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Destroying the VM {{(pid=62923) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1152.747636] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8b60b005-5750-4085-903d-401063b8e9de {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.753702] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1152.753702] env[62923]: value = "task-1370605" [ 1152.753702] env[62923]: _type = "Task" [ 1152.753702] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.762104] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370605, 'name': Destroy_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.812345] env[62923]: DEBUG nova.compute.manager [req-a7d6e197-9e66-4f9a-926f-ca6a05a5e21b req-181ae627-38c9-47ed-a09f-dd259e9a6352 service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Received event network-changed-2bb3f758-42d1-4dc8-82ba-849b12e76fa4 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1152.812345] env[62923]: DEBUG nova.compute.manager [req-a7d6e197-9e66-4f9a-926f-ca6a05a5e21b req-181ae627-38c9-47ed-a09f-dd259e9a6352 service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Refreshing instance network info cache due to event network-changed-2bb3f758-42d1-4dc8-82ba-849b12e76fa4. 
{{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1152.812345] env[62923]: DEBUG oslo_concurrency.lockutils [req-a7d6e197-9e66-4f9a-926f-ca6a05a5e21b req-181ae627-38c9-47ed-a09f-dd259e9a6352 service nova] Acquiring lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.812636] env[62923]: DEBUG oslo_concurrency.lockutils [req-a7d6e197-9e66-4f9a-926f-ca6a05a5e21b req-181ae627-38c9-47ed-a09f-dd259e9a6352 service nova] Acquired lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.812636] env[62923]: DEBUG nova.network.neutron [req-a7d6e197-9e66-4f9a-926f-ca6a05a5e21b req-181ae627-38c9-47ed-a09f-dd259e9a6352 service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Refreshing network info cache for port 2bb3f758-42d1-4dc8-82ba-849b12e76fa4 {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1152.813973] env[62923]: DEBUG oslo_concurrency.lockutils [None req-52922fd4-3a96-444e-bc81-36fad2181d66 tempest-AttachVolumeTestJSON-1511203818 tempest-AttachVolumeTestJSON-1511203818-project-member] Lock "bc86897e-85d6-46ea-bf66-2df7c6ed8fa0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.649s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.877243] env[62923]: DEBUG oslo_vmware.api [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370604, 'name': ReconfigVM_Task, 'duration_secs': 0.500548} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.880118] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Reconfigured VM instance instance-0000006f to attach disk [datastore2] volume-679d9384-ce44-4357-91e8-4f856866cff6/volume-679d9384-ce44-4357-91e8-4f856866cff6.vmdk or device None with type thin {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.885266] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14e333c1-4bda-45d5-bfee-e24808ab6a8b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.903605] env[62923]: DEBUG oslo_vmware.api [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1152.903605] env[62923]: value = "task-1370606" [ 1152.903605] env[62923]: _type = "Task" [ 1152.903605] env[62923]: } to complete. 
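The Acquiring/Acquired/released triplets around the cache refresh above are oslo.concurrency's named-lock discipline: the event handler serializes on a per-instance 'refresh_cache-<uuid>' lock, and the library records how long each caller waited for and held it. A minimal sketch of the same pattern, assuming oslo.concurrency; the function name is illustrative:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1')
    def refresh_network_cache():
        # runs with the named lock held; the DEBUG records above log the
        # waited/held durations for every caller of the same lock name
        ...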
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.911477] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4c26f3-bd30-4749-b104-69de6edd6214 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.917842] env[62923]: DEBUG oslo_vmware.api [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370606, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.922519] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e81ee97-f5c2-4b61-9ba0-5ad5e8a90e68 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.955298] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90bbda4d-f40d-4748-beb2-372fd9d9b1ef {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.963515] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d69bfe-14f3-417d-9cb3-eceacdd87de5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.978168] env[62923]: DEBUG nova.compute.provider_tree [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.262988] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370605, 'name': Destroy_Task, 'duration_secs': 0.372523} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.263379] env[62923]: INFO nova.virt.vmwareapi.vm_util [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Destroyed the VM [ 1153.263687] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Deleting Snapshot of the VM instance {{(pid=62923) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1153.263990] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1ee3f57b-0057-429c-9340-52af9694846c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.269942] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1153.269942] env[62923]: value = "task-1370608" [ 1153.269942] env[62923]: _type = "Task" [ 1153.269942] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.277328] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370608, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.414673] env[62923]: DEBUG oslo_vmware.api [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370606, 'name': ReconfigVM_Task, 'duration_secs': 0.145545} completed successfully. 
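Deleting the shelve snapshot above is the same submit-then-poll shape as the reconfigure: RemoveSnapshot_Task is invoked on the snapshot moref and polled to 100%. A sketch under the assumption that the snapshot moref is already in hand; removeChildren=False mirrors removing just this snapshot:

    def delete_vm_snapshot(session, snapshot_ref):
        task = session.invoke_api(session.vim, 'RemoveSnapshot_Task',
                                  snapshot_ref, removeChildren=False)
        session.wait_for_task(task)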
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.415055] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291556', 'volume_id': '679d9384-ce44-4357-91e8-4f856866cff6', 'name': 'volume-679d9384-ce44-4357-91e8-4f856866cff6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f0b447f4-7d0d-4a91-9e99-bf8fad24b750', 'attached_at': '', 'detached_at': '', 'volume_id': '679d9384-ce44-4357-91e8-4f856866cff6', 'serial': '679d9384-ce44-4357-91e8-4f856866cff6'} {{(pid=62923) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1153.481888] env[62923]: DEBUG nova.scheduler.client.report [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1153.526358] env[62923]: DEBUG nova.network.neutron [req-a7d6e197-9e66-4f9a-926f-ca6a05a5e21b req-181ae627-38c9-47ed-a09f-dd259e9a6352 service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Updated VIF entry in instance network info cache for port 2bb3f758-42d1-4dc8-82ba-849b12e76fa4. 
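The inventory dict reported above is what placement uses to bound scheduling: usable capacity per resource class is (total - reserved) * allocation_ratio. Worked with the logged numbers (an illustrative script, not Nova code):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0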
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1153.526886] env[62923]: DEBUG nova.network.neutron [req-a7d6e197-9e66-4f9a-926f-ca6a05a5e21b req-181ae627-38c9-47ed-a09f-dd259e9a6352 service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Updating instance_info_cache with network_info: [{"id": "2bb3f758-42d1-4dc8-82ba-849b12e76fa4", "address": "fa:16:3e:0b:bc:61", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb3f758-42", "ovs_interfaceid": "2bb3f758-42d1-4dc8-82ba-849b12e76fa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.780231] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370608, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.029275] env[62923]: DEBUG oslo_concurrency.lockutils [req-a7d6e197-9e66-4f9a-926f-ca6a05a5e21b req-181ae627-38c9-47ed-a09f-dd259e9a6352 service nova] Releasing lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1154.284207] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370608, 'name': RemoveSnapshot_Task, 'duration_secs': 0.634827} completed successfully. 
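The instance_info_cache entry above nests network -> subnets -> ips -> floating_ips. A small sketch of walking that structure to recover the addresses, with the layout taken directly from the logged entry:

    def addresses(network_info):
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    floats = [f['address'] for f in ip.get('floating_ips', [])]
                    yield vif['id'], ip['address'], floats

    # For the cache entry above this yields:
    # ('2bb3f758-42d1-4dc8-82ba-849b12e76fa4', '192.168.128.5',
    #  ['10.180.180.143'])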
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.284622] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Deleted Snapshot of the VM instance {{(pid=62923) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1154.285055] env[62923]: DEBUG nova.compute.manager [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1154.286209] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2aba873-bc19-4a89-849b-5869f69cb1bd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.453082] env[62923]: DEBUG nova.objects.instance [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lazy-loading 'flavor' on Instance uuid f0b447f4-7d0d-4a91-9e99-bf8fad24b750 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1154.492814] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.222s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.801420] env[62923]: INFO nova.compute.manager [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Shelve offloading [ 1154.802922] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1154.803183] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9cf53e23-1122-4ae0-b8f6-c45301bae256 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.810800] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1154.810800] env[62923]: value = "task-1370609" [ 1154.810800] env[62923]: _type = "Task" [ 1154.810800] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.817845] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370609, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.959535] env[62923]: DEBUG oslo_concurrency.lockutils [None req-6955bd4d-2fae-4d05-84e2-f054addeb559 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.281s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.054476] env[62923]: INFO nova.scheduler.client.report [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleted allocation for migration 5f292209-3bb9-49ed-8c52-c1b8dbf590f8 [ 1155.170150] env[62923]: INFO nova.compute.manager [None req-d18c0047-7c13-4420-927e-97645e964df3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Get console output [ 1155.170511] env[62923]: WARNING nova.virt.vmwareapi.driver [None req-d18c0047-7c13-4420-927e-97645e964df3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] The console log is missing. Check your VSPC configuration [ 1155.324025] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] VM already powered off {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1155.324025] env[62923]: DEBUG nova.compute.manager [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1155.324025] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9077c2b-9908-4d52-9e12-888cbc9686ce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.329153] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1155.329471] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.329766] env[62923]: DEBUG nova.network.neutron [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Building network info cache for instance {{(pid=62923) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1155.559829] env[62923]: DEBUG oslo_concurrency.lockutils [None req-68b458d6-0b0c-4a58-a9b3-af408cf142b3 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b943c8e1-68e0-4313-bde7-865ba05408b9" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.947s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.628590] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.629116] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1156.055669] env[62923]: DEBUG nova.network.neutron [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updating instance_info_cache with network_info: [{"id": "9add9dea-2408-497b-982c-a558a1db59bc", "address": "fa:16:3e:22:3a:73", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9add9dea-24", "ovs_interfaceid": "9add9dea-2408-497b-982c-a558a1db59bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.131746] env[62923]: INFO nova.compute.manager [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Detaching volume 679d9384-ce44-4357-91e8-4f856866cff6 [ 1156.165956] env[62923]: INFO nova.virt.block_device [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 
tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Attempting to driver detach volume 679d9384-ce44-4357-91e8-4f856866cff6 from mountpoint /dev/sdb [ 1156.166216] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Volume detach. Driver type: vmdk {{(pid=62923) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1156.166407] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291556', 'volume_id': '679d9384-ce44-4357-91e8-4f856866cff6', 'name': 'volume-679d9384-ce44-4357-91e8-4f856866cff6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f0b447f4-7d0d-4a91-9e99-bf8fad24b750', 'attached_at': '', 'detached_at': '', 'volume_id': '679d9384-ce44-4357-91e8-4f856866cff6', 'serial': '679d9384-ce44-4357-91e8-4f856866cff6'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1156.167378] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d721ac-5238-48b6-a851-2e0348ca8042 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.189449] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a7f9d5-ff4b-46b4-adc3-afabb5f41824 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.197732] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8e7444-491b-49b2-81ac-bb7315ee9726 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.219669] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a15457c-9ae4-4c0c-b17b-ee143c1c6b66 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.234725] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] The volume has not been displaced from its original location: [datastore2] volume-679d9384-ce44-4357-91e8-4f856866cff6/volume-679d9384-ce44-4357-91e8-4f856866cff6.vmdk. No consolidation needed. 
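The connection_info dict logged for _attach_volume_vmdk earlier and for _detach_volume_vmdk here is the contract between the Cinder vmdk backend and this driver; the driver-relevant fields nest under 'data'. Illustrative Python only, with the values copied from the log:

    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-291556',    # the shadow VM backing the volume
            'volume_id': '679d9384-ce44-4357-91e8-4f856866cff6',
            'name': 'volume-679d9384-ce44-4357-91e8-4f856866cff6',
            'access_mode': 'rw',
            'encrypted': False,
        },
        'serial': '679d9384-ce44-4357-91e8-4f856866cff6',
    }

    data = connection_info['data']
    # the datastore path in the reconfigure records is derived from 'name':
    vmdk_path = '%s/%s.vmdk' % (data['name'], data['name'])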
{{(pid=62923) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1156.239855] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Reconfiguring VM instance instance-0000006f to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1156.240152] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42976653-35ba-4303-8658-9d5493e006ec {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.259084] env[62923]: DEBUG oslo_vmware.api [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1156.259084] env[62923]: value = "task-1370610" [ 1156.259084] env[62923]: _type = "Task" [ 1156.259084] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.270610] env[62923]: DEBUG oslo_vmware.api [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370610, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.558514] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1156.770939] env[62923]: DEBUG oslo_vmware.api [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370610, 'name': ReconfigVM_Task, 'duration_secs': 0.257405} completed successfully. 
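The "Reconfiguring VM instance ... to detach disk 2001" record above again goes through ReconfigVM_Task, this time with a config spec whose deviceChange removes the virtual disk. A hedged sketch of such a spec; client_factory and device are assumed inputs, and the 'ns0:' type names follow the vSphere SDK as exposed through oslo.vmware's suds client:

    def detach_disk_spec(client_factory, device):
        spec = client_factory.create('ns0:VirtualMachineConfigSpec')
        change = client_factory.create('ns0:VirtualDeviceConfigSpec')
        change.operation = 'remove'
        change.device = device   # the VirtualDisk to drop (key 2001 above)
        # fileOperation is left unset so the backing VMDK survives: the log
        # detaches the Cinder volume without deleting volume-...vmdk
        spec.deviceChange = [change]
        return spec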
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.771265] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Reconfigured VM instance instance-0000006f to detach disk 2001 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1156.775789] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2dfe2b05-d409-49a1-9e46-5a0c286cd3db {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.788360] env[62923]: DEBUG nova.compute.manager [req-3e3abe0d-8df0-4fde-8ed2-a5ed79046907 req-7b3883a3-8be1-48db-8046-42c316d1bc8b service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Received event network-vif-unplugged-9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1156.788579] env[62923]: DEBUG oslo_concurrency.lockutils [req-3e3abe0d-8df0-4fde-8ed2-a5ed79046907 req-7b3883a3-8be1-48db-8046-42c316d1bc8b service nova] Acquiring lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1156.788827] env[62923]: DEBUG oslo_concurrency.lockutils [req-3e3abe0d-8df0-4fde-8ed2-a5ed79046907 req-7b3883a3-8be1-48db-8046-42c316d1bc8b service nova] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1156.788998] env[62923]: DEBUG oslo_concurrency.lockutils [req-3e3abe0d-8df0-4fde-8ed2-a5ed79046907 req-7b3883a3-8be1-48db-8046-42c316d1bc8b service nova] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.789211] env[62923]: DEBUG nova.compute.manager [req-3e3abe0d-8df0-4fde-8ed2-a5ed79046907 req-7b3883a3-8be1-48db-8046-42c316d1bc8b service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] No waiting events found dispatching network-vif-unplugged-9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1156.789360] env[62923]: WARNING nova.compute.manager [req-3e3abe0d-8df0-4fde-8ed2-a5ed79046907 req-7b3883a3-8be1-48db-8046-42c316d1bc8b service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Received unexpected event network-vif-unplugged-9add9dea-2408-497b-982c-a558a1db59bc for instance with vm_state shelved and task_state shelving_offloading. [ 1156.792309] env[62923]: DEBUG oslo_vmware.api [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1156.792309] env[62923]: value = "task-1370612" [ 1156.792309] env[62923]: _type = "Task" [ 1156.792309] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.805020] env[62923]: DEBUG oslo_vmware.api [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370612, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.883156] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1156.884079] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bc2022-e49d-4e9e-8246-0cd8d6d21865 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.892352] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1156.892619] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0c00355-c222-4f19-a227-e42d1f3eacd8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.955927] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1156.956178] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1156.956364] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleting the datastore file [datastore1] d4bc3c6c-20ac-4714-8109-867a2f6292b1 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.956639] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8adc6634-e3c7-4a8b-a070-2aca30d3dfee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.963955] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1156.963955] env[62923]: value = "task-1370614" [ 1156.963955] env[62923]: _type = "Task" [ 1156.963955] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.971693] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370614, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.302327] env[62923]: DEBUG oslo_vmware.api [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370612, 'name': ReconfigVM_Task, 'duration_secs': 0.148542} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.302634] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291556', 'volume_id': '679d9384-ce44-4357-91e8-4f856866cff6', 'name': 'volume-679d9384-ce44-4357-91e8-4f856866cff6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f0b447f4-7d0d-4a91-9e99-bf8fad24b750', 'attached_at': '', 'detached_at': '', 'volume_id': '679d9384-ce44-4357-91e8-4f856866cff6', 'serial': '679d9384-ce44-4357-91e8-4f856866cff6'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1157.474797] env[62923]: DEBUG oslo_vmware.api [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370614, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126278} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.475030] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1157.475247] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1157.475422] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1157.497012] env[62923]: INFO nova.scheduler.client.report [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleted allocations for instance d4bc3c6c-20ac-4714-8109-867a2f6292b1 [ 1157.842282] env[62923]: DEBUG nova.objects.instance [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lazy-loading 'flavor' on Instance uuid f0b447f4-7d0d-4a91-9e99-bf8fad24b750 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.001696] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.001968] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.002215] env[62923]: DEBUG nova.objects.instance [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lazy-loading 'resources' on Instance uuid d4bc3c6c-20ac-4714-8109-867a2f6292b1 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.505052] env[62923]: DEBUG nova.objects.instance [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lazy-loading 'numa_topology' on Instance uuid d4bc3c6c-20ac-4714-8109-867a2f6292b1 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.813697] env[62923]: DEBUG nova.compute.manager [req-5b1cbe02-2a44-403a-98d3-13805354116e req-dbdff172-599c-439b-bb11-ea3d85c76823 service nova] 
[instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Received event network-changed-9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1158.813921] env[62923]: DEBUG nova.compute.manager [req-5b1cbe02-2a44-403a-98d3-13805354116e req-dbdff172-599c-439b-bb11-ea3d85c76823 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Refreshing instance network info cache due to event network-changed-9add9dea-2408-497b-982c-a558a1db59bc. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1158.814150] env[62923]: DEBUG oslo_concurrency.lockutils [req-5b1cbe02-2a44-403a-98d3-13805354116e req-dbdff172-599c-439b-bb11-ea3d85c76823 service nova] Acquiring lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1158.814309] env[62923]: DEBUG oslo_concurrency.lockutils [req-5b1cbe02-2a44-403a-98d3-13805354116e req-dbdff172-599c-439b-bb11-ea3d85c76823 service nova] Acquired lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.814472] env[62923]: DEBUG nova.network.neutron [req-5b1cbe02-2a44-403a-98d3-13805354116e req-dbdff172-599c-439b-bb11-ea3d85c76823 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Refreshing network info cache for port 9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1158.849935] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ca77a82d-7935-419e-92e7-7860dec496a3 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.221s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.007027] env[62923]: DEBUG nova.objects.base [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1159.080790] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8397fb3e-db5a-4e42-ba27-340430715763 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.090042] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a20174c-5ddb-46b9-acbd-e5348ea46f6a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.121101] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b0e6fd-1846-4fe8-a176-501631a83203 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.129245] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee09462-7365-499f-8c1a-19087f8cc322 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.142622] env[62923]: DEBUG 
nova.compute.provider_tree [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.582616] env[62923]: DEBUG nova.network.neutron [req-5b1cbe02-2a44-403a-98d3-13805354116e req-dbdff172-599c-439b-bb11-ea3d85c76823 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updated VIF entry in instance network info cache for port 9add9dea-2408-497b-982c-a558a1db59bc. {{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1159.583661] env[62923]: DEBUG nova.network.neutron [req-5b1cbe02-2a44-403a-98d3-13805354116e req-dbdff172-599c-439b-bb11-ea3d85c76823 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updating instance_info_cache with network_info: [{"id": "9add9dea-2408-497b-982c-a558a1db59bc", "address": "fa:16:3e:22:3a:73", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap9add9dea-24", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.646074] env[62923]: DEBUG nova.scheduler.client.report [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1159.861029] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.861392] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock 
"f0b447f4-7d0d-4a91-9e99-bf8fad24b750" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.861618] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.861810] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.862020] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.864488] env[62923]: INFO nova.compute.manager [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Terminating instance [ 1159.866339] env[62923]: DEBUG nova.compute.manager [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Start destroying the instance on the hypervisor. 
{{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1159.866534] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1159.867444] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a46c56-9a11-4f8b-b632-4db8bcdde04a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.875280] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1159.875512] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfef927a-8c78-4607-8574-dea15a40ceae {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.882882] env[62923]: DEBUG oslo_vmware.api [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1159.882882] env[62923]: value = "task-1370616" [ 1159.882882] env[62923]: _type = "Task" [ 1159.882882] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.891831] env[62923]: DEBUG oslo_vmware.api [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370616, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.086059] env[62923]: DEBUG oslo_concurrency.lockutils [req-5b1cbe02-2a44-403a-98d3-13805354116e req-dbdff172-599c-439b-bb11-ea3d85c76823 service nova] Releasing lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1160.151444] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.149s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.393555] env[62923]: DEBUG oslo_vmware.api [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370616, 'name': PowerOffVM_Task, 'duration_secs': 0.38313} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.393874] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1160.394142] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1160.394445] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-948cd7b2-eb67-4d5b-a2af-2aa850aef925 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.439716] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.458701] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1160.458949] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1160.459116] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Deleting the datastore file [datastore1] f0b447f4-7d0d-4a91-9e99-bf8fad24b750 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1160.459392] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f406ed3b-3fed-4792-8337-f4f9f5fcd8c8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.468207] env[62923]: DEBUG oslo_vmware.api [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for the task: (returnval){ [ 1160.468207] env[62923]: value = "task-1370618" [ 1160.468207] env[62923]: _type = "Task" [ 1160.468207] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.476552] env[62923]: DEBUG oslo_vmware.api [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370618, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.660336] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ef113cc4-6cde-4651-a4ba-ca2253cd1805 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 23.275s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.660762] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 0.221s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.660953] env[62923]: INFO nova.compute.manager [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Unshelving [ 1160.977955] env[62923]: DEBUG oslo_vmware.api [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Task: {'id': task-1370618, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138933} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.978329] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1160.978517] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1160.978597] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1160.978751] env[62923]: INFO nova.compute.manager [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Took 1.11 seconds to destroy the instance on the hypervisor.
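The run of entries above is the vmwareapi driver's destroy path end to end: PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task, each submitted through oslo.vmware's request handler (service.py:371) and polled to completion by wait_for_task (api.py:397/434/444). Below is a minimal sketch of driving one such vCenter task directly through oslo.vmware; the host and credentials are placeholders, the instance UUID is the one from this log, and it assumes that UUID resolves to exactly one VM. Nova reaches the same machinery through its own VMwareAPISession wrapper, so this illustrates the pattern, not the driver's internal code.

# Sketch: submit a vCenter task and block on it, mirroring the
# wait_for_task/_poll_task lines in the log. Placeholder credentials.
from oslo_vmware import api

VC_HOST, VC_USER, VC_PASS = 'vc.example.test', 'user', 'secret'  # placeholders
INSTANCE_UUID = 'f0b447f4-7d0d-4a91-9e99-bf8fad24b750'  # from the log above

session = api.VMwareAPISession(VC_HOST, VC_USER, VC_PASS,
                               api_retry_count=10,      # retry transient faults
                               task_poll_interval=0.5)  # "progress is N%" cadence

# Resolve the VM by instance UUID (the SearchIndex.FindAllByUuid call that
# also appears in the log), assuming exactly one match comes back.
search_index = session.vim.service_content.searchIndex
vm_refs = session.invoke_api(session.vim, 'FindAllByUuid', search_index,
                             uuid=INSTANCE_UUID, vmSearch=True,
                             instanceUuid=True)

# Submit the power-off; the returnval is a Task managed object, the
# "task-1370616"-style value the log waits on.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_refs[0])

# Poll until vCenter reports success; raises an oslo.vmware exception
# if the task errors out.
session.wait_for_task(task)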
[ 1160.979012] env[62923]: DEBUG oslo.service.loopingcall [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1160.979214] env[62923]: DEBUG nova.compute.manager [-] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1160.979323] env[62923]: DEBUG nova.network.neutron [-] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1161.426971] env[62923]: DEBUG nova.compute.manager [req-ab8d3320-7321-4d60-8e91-e22c95f71d12 req-afe4c0b0-76ef-43eb-a338-5ba595272679 service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Received event network-vif-deleted-fef6cf51-9164-425e-8951-263bb2427ad4 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1161.427183] env[62923]: INFO nova.compute.manager [req-ab8d3320-7321-4d60-8e91-e22c95f71d12 req-afe4c0b0-76ef-43eb-a338-5ba595272679 service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Neutron deleted interface fef6cf51-9164-425e-8951-263bb2427ad4; detaching it from the instance and deleting it from the info cache [ 1161.427397] env[62923]: DEBUG nova.network.neutron [req-ab8d3320-7321-4d60-8e91-e22c95f71d12 req-afe4c0b0-76ef-43eb-a338-5ba595272679 service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.691250] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.691538] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.691754] env[62923]: DEBUG nova.objects.instance [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lazy-loading 'pci_requests' on Instance uuid d4bc3c6c-20ac-4714-8109-867a2f6292b1 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1161.906179] env[62923]: DEBUG nova.network.neutron [-] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.929739] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-98344d50-01b6-4e11-8fe5-152173ff7da6 {{(pid=62923)
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.940244] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc984e6-b718-4aa3-962c-a1d1bc4774dc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.968253] env[62923]: DEBUG nova.compute.manager [req-ab8d3320-7321-4d60-8e91-e22c95f71d12 req-afe4c0b0-76ef-43eb-a338-5ba595272679 service nova] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Detach interface failed, port_id=fef6cf51-9164-425e-8951-263bb2427ad4, reason: Instance f0b447f4-7d0d-4a91-9e99-bf8fad24b750 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1162.195209] env[62923]: DEBUG nova.objects.instance [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lazy-loading 'numa_topology' on Instance uuid d4bc3c6c-20ac-4714-8109-867a2f6292b1 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.409556] env[62923]: INFO nova.compute.manager [-] [instance: f0b447f4-7d0d-4a91-9e99-bf8fad24b750] Took 1.43 seconds to deallocate network for instance. [ 1162.697593] env[62923]: INFO nova.compute.claims [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1162.915711] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1163.788138] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c941bb30-be7e-40bc-9a41-a191c38e5a04 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.797592] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7afcae2-b445-4ada-93f3-961adaef7acc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.831957] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6032bf25-2f9c-43e8-ad98-6487094ba70e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.839456] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f04c3b-a926-4278-8537-eeae18798d46 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.853411] env[62923]: DEBUG nova.compute.provider_tree [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1164.356900] env[62923]: DEBUG nova.scheduler.client.report [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1164.862373] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.171s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.867163] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.951s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1164.867163] env[62923]: DEBUG nova.objects.instance [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lazy-loading 'resources' on Instance uuid f0b447f4-7d0d-4a91-9e99-bf8fad24b750 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1164.894442] env[62923]: INFO nova.network.neutron [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updating port 9add9dea-2408-497b-982c-a558a1db59bc with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1165.464763] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d35369c-44f3-46dd-88f7-dfeb8e5780b0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.473425] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff32aa2-425b-4d8d-98a7-4ba5a3cf5c92 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.503703] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52be157f-0e02-4f73-b469-4cfda60d8f54 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.510917] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7006704-68bd-4b6c-9828-c6ba43fda8a5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.524950] env[62923]: DEBUG 
nova.compute.provider_tree [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.028457] env[62923]: DEBUG nova.scheduler.client.report [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1166.289086] env[62923]: DEBUG nova.compute.manager [req-81c831c9-bbd9-4994-9722-005b3acb2a01 req-86c854bf-0b36-45ff-bd3d-0fc4149cb9d5 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Received event network-vif-plugged-9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1166.289355] env[62923]: DEBUG oslo_concurrency.lockutils [req-81c831c9-bbd9-4994-9722-005b3acb2a01 req-86c854bf-0b36-45ff-bd3d-0fc4149cb9d5 service nova] Acquiring lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.290326] env[62923]: DEBUG oslo_concurrency.lockutils [req-81c831c9-bbd9-4994-9722-005b3acb2a01 req-86c854bf-0b36-45ff-bd3d-0fc4149cb9d5 service nova] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.290540] env[62923]: DEBUG oslo_concurrency.lockutils [req-81c831c9-bbd9-4994-9722-005b3acb2a01 req-86c854bf-0b36-45ff-bd3d-0fc4149cb9d5 service nova] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.290725] env[62923]: DEBUG nova.compute.manager [req-81c831c9-bbd9-4994-9722-005b3acb2a01 req-86c854bf-0b36-45ff-bd3d-0fc4149cb9d5 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] No waiting events found dispatching network-vif-plugged-9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1166.290886] env[62923]: WARNING nova.compute.manager [req-81c831c9-bbd9-4994-9722-005b3acb2a01 req-86c854bf-0b36-45ff-bd3d-0fc4149cb9d5 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Received unexpected event network-vif-plugged-9add9dea-2408-497b-982c-a558a1db59bc for instance with vm_state shelved_offloaded and task_state spawning.
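The inventory dict logged for provider a513b783-544c-421b-85ec-cfd6d6ee698d is what the resource tracker reports to placement; usable capacity per resource class follows placement's formula (total - reserved) * allocation_ratio, while min_unit/max_unit/step_size only bound what any single allocation may request. A quick worked check against the numbers above (illustrative arithmetic, not nova code):

# Effective capacity implied by the reported inventory:
#   capacity = (total - reserved) * allocation_ratio
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

capacity = {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inventory.items()}
print(capacity)  # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

Against that headroom, the m1.nano claim made in this run (1 VCPU, 192 MB RAM, 1 GB root disk) is negligible, which is why instance_claim above acquires "compute_resources" with waited 0.000s and the claim on node domain-c8 succeeds immediately.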
[ 1166.386405] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1166.386405] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.386405] env[62923]: DEBUG nova.network.neutron [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1166.533192] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.667s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.551975] env[62923]: INFO nova.scheduler.client.report [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Deleted allocations for instance f0b447f4-7d0d-4a91-9e99-bf8fad24b750 [ 1167.058856] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ba8b0a0a-e515-44b8-8ac5-a8e6e1b88f63 tempest-AttachVolumeNegativeTest-339867505 tempest-AttachVolumeNegativeTest-339867505-project-member] Lock "f0b447f4-7d0d-4a91-9e99-bf8fad24b750" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.197s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.128774] env[62923]: DEBUG nova.network.neutron [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updating instance_info_cache with network_info: [{"id": "9add9dea-2408-497b-982c-a558a1db59bc", "address": "fa:16:3e:22:3a:73", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id":
"nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9add9dea-24", "ovs_interfaceid": "9add9dea-2408-497b-982c-a558a1db59bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.631920] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1167.656809] env[62923]: DEBUG nova.virt.hardware [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-29T12:00:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='134de95a69e9c5189308c60a7146d6ee',container_format='bare',created_at=2024-10-29T12:12:07Z,direct_url=,disk_format='vmdk',id=9e0c20c1-d4ae-4b44-af61-b80ec3136ded,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-833099273-shelved',owner='418b805157a74173b5cfe13ea5b61c13',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2024-10-29T12:12:23Z,virtual_size=,visibility=), allow threads: False {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1167.657096] env[62923]: DEBUG nova.virt.hardware [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Flavor limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1167.657531] env[62923]: DEBUG nova.virt.hardware [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Image limits 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1167.657531] env[62923]: DEBUG nova.virt.hardware [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Flavor pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1167.657651] env[62923]: DEBUG nova.virt.hardware [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Image pref 0:0:0 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1167.657740] env[62923]: DEBUG nova.virt.hardware [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62923) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1167.657946] env[62923]: DEBUG nova.virt.hardware [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1167.658151] env[62923]: DEBUG nova.virt.hardware [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1167.658343] env[62923]: DEBUG nova.virt.hardware [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Got 1 possible topologies {{(pid=62923) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1167.658508] env[62923]: DEBUG nova.virt.hardware [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1167.658677] env[62923]: DEBUG nova.virt.hardware [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62923) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1167.659823] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0f0078-f094-4791-989f-c27a46907ce5 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.667720] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17324775-1fe0-4e32-89fb-995da5679c8a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.680539] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:3a:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9add9dea-2408-497b-982c-a558a1db59bc', 'vif_model': 'vmxnet3'}] {{(pid=62923) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1167.687838] env[62923]: DEBUG oslo.service.loopingcall [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1167.688080] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Creating VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1167.688298] env[62923]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52661b0b-7b34-48b4-aed7-6a0629a1fb7d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.708172] env[62923]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1167.708172] env[62923]: value = "task-1370620" [ 1167.708172] env[62923]: _type = "Task" [ 1167.708172] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.713574] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370620, 'name': CreateVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.216354] env[62923]: DEBUG oslo_vmware.api [-] Task: {'id': task-1370620, 'name': CreateVM_Task, 'duration_secs': 0.276095} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.216664] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Created VM on the ESX host {{(pid=62923) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1168.217240] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9e0c20c1-d4ae-4b44-af61-b80ec3136ded" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.217410] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9e0c20c1-d4ae-4b44-af61-b80ec3136ded" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.217791] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9e0c20c1-d4ae-4b44-af61-b80ec3136ded" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1168.218061] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04357a07-c700-4058-9b19-dece297121d4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.222686] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1168.222686] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521bd1a6-1e5d-c4f2-b1b1-c364da761c17" [ 1168.222686] env[62923]: _type = "Task" [ 1168.222686] env[62923]: } to 
complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.231631] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]521bd1a6-1e5d-c4f2-b1b1-c364da761c17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.318754] env[62923]: DEBUG nova.compute.manager [req-3f731be0-825a-48c2-b072-01d12fbef7f0 req-7ea5e86f-9342-47ad-b394-66d7525fde97 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Received event network-changed-9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1168.318956] env[62923]: DEBUG nova.compute.manager [req-3f731be0-825a-48c2-b072-01d12fbef7f0 req-7ea5e86f-9342-47ad-b394-66d7525fde97 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Refreshing instance network info cache due to event network-changed-9add9dea-2408-497b-982c-a558a1db59bc. {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1168.319241] env[62923]: DEBUG oslo_concurrency.lockutils [req-3f731be0-825a-48c2-b072-01d12fbef7f0 req-7ea5e86f-9342-47ad-b394-66d7525fde97 service nova] Acquiring lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.319423] env[62923]: DEBUG oslo_concurrency.lockutils [req-3f731be0-825a-48c2-b072-01d12fbef7f0 req-7ea5e86f-9342-47ad-b394-66d7525fde97 service nova] Acquired lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.319559] env[62923]: DEBUG nova.network.neutron [req-3f731be0-825a-48c2-b072-01d12fbef7f0 req-7ea5e86f-9342-47ad-b394-66d7525fde97 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Refreshing network info cache for port 9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1168.733196] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9e0c20c1-d4ae-4b44-af61-b80ec3136ded" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1168.733405] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Processing image 9e0c20c1-d4ae-4b44-af61-b80ec3136ded {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1168.733724] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9e0c20c1-d4ae-4b44-af61-b80ec3136ded/9e0c20c1-d4ae-4b44-af61-b80ec3136ded.vmdk" {{(pid=62923) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.733907] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9e0c20c1-d4ae-4b44-af61-b80ec3136ded/9e0c20c1-d4ae-4b44-af61-b80ec3136ded.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.734134] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1168.734417] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-932c7b5b-5c87-404f-a474-400fb8c058b8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.750925] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1168.751113] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62923) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1168.751790] env[62923]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc5f5601-5475-4743-9918-007580a69c8a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.756279] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1168.756279] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e45916-1cb7-05f8-b3ac-85a31dffaaf3" [ 1168.756279] env[62923]: _type = "Task" [ 1168.756279] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.764137] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52e45916-1cb7-05f8-b3ac-85a31dffaaf3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.028227] env[62923]: DEBUG nova.network.neutron [req-3f731be0-825a-48c2-b072-01d12fbef7f0 req-7ea5e86f-9342-47ad-b394-66d7525fde97 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updated VIF entry in instance network info cache for port 9add9dea-2408-497b-982c-a558a1db59bc. 
{{(pid=62923) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1169.028619] env[62923]: DEBUG nova.network.neutron [req-3f731be0-825a-48c2-b072-01d12fbef7f0 req-7ea5e86f-9342-47ad-b394-66d7525fde97 service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updating instance_info_cache with network_info: [{"id": "9add9dea-2408-497b-982c-a558a1db59bc", "address": "fa:16:3e:22:3a:73", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9add9dea-24", "ovs_interfaceid": "9add9dea-2408-497b-982c-a558a1db59bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.266364] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Preparing fetch location {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1169.266718] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Fetch image to [datastore1] OSTACK_IMG_570cb7a1-c93f-4ed1-9646-c4a33fab301c/OSTACK_IMG_570cb7a1-c93f-4ed1-9646-c4a33fab301c.vmdk {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1169.266770] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Downloading stream optimized image 9e0c20c1-d4ae-4b44-af61-b80ec3136ded to [datastore1] OSTACK_IMG_570cb7a1-c93f-4ed1-9646-c4a33fab301c/OSTACK_IMG_570cb7a1-c93f-4ed1-9646-c4a33fab301c.vmdk on the data store datastore1 as vApp {{(pid=62923) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1169.266913] env[62923]: DEBUG nova.virt.vmwareapi.images [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Downloading image file data 9e0c20c1-d4ae-4b44-af61-b80ec3136ded to the ESX as VM named 'OSTACK_IMG_570cb7a1-c93f-4ed1-9646-c4a33fab301c' {{(pid=62923) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1169.332996] env[62923]: DEBUG oslo_vmware.rw_handles [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1169.332996] env[62923]: value = "resgroup-9" [ 1169.332996] env[62923]: _type = "ResourcePool" [ 1169.332996] env[62923]: }. {{(pid=62923) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1169.333323] env[62923]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-8fa608cc-9c9c-480b-b3e8-3121d6a74e14 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.353439] env[62923]: DEBUG oslo_vmware.rw_handles [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lease: (returnval){ [ 1169.353439] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b98083-7348-e575-f16d-c1e47dde1693" [ 1169.353439] env[62923]: _type = "HttpNfcLease" [ 1169.353439] env[62923]: } obtained for vApp import into resource pool (val){ [ 1169.353439] env[62923]: value = "resgroup-9" [ 1169.353439] env[62923]: _type = "ResourcePool" [ 1169.353439] env[62923]: }. {{(pid=62923) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1169.353728] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the lease: (returnval){ [ 1169.353728] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b98083-7348-e575-f16d-c1e47dde1693" [ 1169.353728] env[62923]: _type = "HttpNfcLease" [ 1169.353728] env[62923]: } to be ready. {{(pid=62923) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1169.359298] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1169.359298] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b98083-7348-e575-f16d-c1e47dde1693" [ 1169.359298] env[62923]: _type = "HttpNfcLease" [ 1169.359298] env[62923]: } is initializing. {{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1169.531277] env[62923]: DEBUG oslo_concurrency.lockutils [req-3f731be0-825a-48c2-b072-01d12fbef7f0 req-7ea5e86f-9342-47ad-b394-66d7525fde97 service nova] Releasing lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.861848] env[62923]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1169.861848] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b98083-7348-e575-f16d-c1e47dde1693" [ 1169.861848] env[62923]: _type = "HttpNfcLease" [ 1169.861848] env[62923]: } is ready. 
{{(pid=62923) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1169.862152] env[62923]: DEBUG oslo_vmware.rw_handles [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1169.862152] env[62923]: value = "session[5244b6ab-0c96-bd20-37e8-7b54ff9edcb5]52b98083-7348-e575-f16d-c1e47dde1693" [ 1169.862152] env[62923]: _type = "HttpNfcLease" [ 1169.862152] env[62923]: }. {{(pid=62923) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1169.862855] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80c37ec-8e45-4455-a0d7-bcf358876bcc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.869528] env[62923]: DEBUG oslo_vmware.rw_handles [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ffdca4-4fd5-f750-0c5c-a07625e0293d/disk-0.vmdk from lease info. {{(pid=62923) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1169.869697] env[62923]: DEBUG oslo_vmware.rw_handles [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ffdca4-4fd5-f750-0c5c-a07625e0293d/disk-0.vmdk. {{(pid=62923) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1169.933245] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f3a87824-6066-4479-af5e-176c8bf2ae2c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.953230] env[62923]: DEBUG oslo_vmware.rw_handles [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Completed reading data from the image iterator. {{(pid=62923) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1170.953569] env[62923]: DEBUG oslo_vmware.rw_handles [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ffdca4-4fd5-f750-0c5c-a07625e0293d/disk-0.vmdk. 
{{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1170.954481] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb82d04-b043-4942-a0a1-60f4523921d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.961470] env[62923]: DEBUG oslo_vmware.rw_handles [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ffdca4-4fd5-f750-0c5c-a07625e0293d/disk-0.vmdk is in state: ready. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1170.961601] env[62923]: DEBUG oslo_vmware.rw_handles [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ffdca4-4fd5-f750-0c5c-a07625e0293d/disk-0.vmdk. {{(pid=62923) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1170.961828] env[62923]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-6082a639-17ec-4127-a2a1-22b71445e327 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.334743] env[62923]: DEBUG oslo_vmware.rw_handles [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ffdca4-4fd5-f750-0c5c-a07625e0293d/disk-0.vmdk. 
{{(pid=62923) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1171.334995] env[62923]: INFO nova.virt.vmwareapi.images [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Downloaded image file data 9e0c20c1-d4ae-4b44-af61-b80ec3136ded [ 1171.335958] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4879937d-987f-4525-9481-fb1809b3f4ec {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.351807] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85fbde24-b0b2-462e-be0e-fc4a6a370202 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.382494] env[62923]: INFO nova.virt.vmwareapi.images [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] The imported VM was unregistered [ 1171.384631] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Caching image {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1171.384870] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Creating directory with path [datastore1] devstack-image-cache_base/9e0c20c1-d4ae-4b44-af61-b80ec3136ded {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1171.385180] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8bd3dd8-16a0-4a26-9299-8aab12f59eb8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.404418] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Created directory with path [datastore1] devstack-image-cache_base/9e0c20c1-d4ae-4b44-af61-b80ec3136ded {{(pid=62923) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1171.404713] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_570cb7a1-c93f-4ed1-9646-c4a33fab301c/OSTACK_IMG_570cb7a1-c93f-4ed1-9646-c4a33fab301c.vmdk to [datastore1] devstack-image-cache_base/9e0c20c1-d4ae-4b44-af61-b80ec3136ded/9e0c20c1-d4ae-4b44-af61-b80ec3136ded.vmdk. 
{{(pid=62923) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}}
[ 1171.405152] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-59b56378-abf2-4cac-b2e5-08d0f404bc5b {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1171.412513] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){
[ 1171.412513] env[62923]: value = "task-1370624"
[ 1171.412513] env[62923]: _type = "Task"
[ 1171.412513] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1171.423158] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370624, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1171.926567] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370624, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1172.426250] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370624, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1172.928841] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370624, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1173.427612] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370624, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1173.927155] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370624, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.14605} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1173.927312] env[62923]: INFO nova.virt.vmwareapi.ds_util [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_570cb7a1-c93f-4ed1-9646-c4a33fab301c/OSTACK_IMG_570cb7a1-c93f-4ed1-9646-c4a33fab301c.vmdk to [datastore1] devstack-image-cache_base/9e0c20c1-d4ae-4b44-af61-b80ec3136ded/9e0c20c1-d4ae-4b44-af61-b80ec3136ded.vmdk.
[ 1173.927505] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Cleaning up location [datastore1] OSTACK_IMG_570cb7a1-c93f-4ed1-9646-c4a33fab301c {{(pid=62923) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}}
[ 1173.927726] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_570cb7a1-c93f-4ed1-9646-c4a33fab301c {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1173.928022] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf6a0962-b753-4fa5-b45d-9447af44f4b9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1173.937459] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){
[ 1173.937459] env[62923]: value = "task-1370626"
[ 1173.937459] env[62923]: _type = "Task"
[ 1173.937459] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1173.944935] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370626, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1174.447106] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370626, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033696} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1174.447419] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1174.447592] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9e0c20c1-d4ae-4b44-af61-b80ec3136ded/9e0c20c1-d4ae-4b44-af61-b80ec3136ded.vmdk" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1174.447828] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9e0c20c1-d4ae-4b44-af61-b80ec3136ded/9e0c20c1-d4ae-4b44-af61-b80ec3136ded.vmdk to [datastore1] d4bc3c6c-20ac-4714-8109-867a2f6292b1/d4bc3c6c-20ac-4714-8109-867a2f6292b1.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 1174.448086] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-baeb4064-c403-4ef7-bcad-38d65d2b0d8c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1174.454195] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){
[ 1174.454195] env[62923]: value = "task-1370627"
[ 1174.454195] env[62923]: _type = "Task"
[ 1174.454195] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1174.461364] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370627, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1174.965853] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370627, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1175.467321] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370627, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1175.966793] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370627, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1176.468489] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370627, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1176.968084] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370627, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.161456} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1176.968487] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9e0c20c1-d4ae-4b44-af61-b80ec3136ded/9e0c20c1-d4ae-4b44-af61-b80ec3136ded.vmdk to [datastore1] d4bc3c6c-20ac-4714-8109-867a2f6292b1/d4bc3c6c-20ac-4714-8109-867a2f6292b1.vmdk {{(pid=62923) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 1176.969360] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09463d7f-6959-415d-8010-bcc35295e026 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1176.990666] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] d4bc3c6c-20ac-4714-8109-867a2f6292b1/d4bc3c6c-20ac-4714-8109-867a2f6292b1.vmdk or device None with type streamOptimized {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 1176.990933] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32cc4522-8d25-4baa-bb58-70e2583193f3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1177.010594] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){
[ 1177.010594] env[62923]: value = "task-1370628"
[ 1177.010594] env[62923]: _type = "Task"
[ 1177.010594] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1177.017898] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370628, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1177.521286] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370628, 'name': ReconfigVM_Task, 'duration_secs': 0.259946} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1177.521654] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Reconfigured VM instance instance-0000006e to attach disk [datastore1] d4bc3c6c-20ac-4714-8109-867a2f6292b1/d4bc3c6c-20ac-4714-8109-867a2f6292b1.vmdk or device None with type streamOptimized {{(pid=62923) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1177.522128] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78b29b56-65f3-4469-9089-3c0bcb35d7ee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1177.528367] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){
[ 1177.528367] env[62923]: value = "task-1370629"
[ 1177.528367] env[62923]: _type = "Task"
[ 1177.528367] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1177.535574] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370629, 'name': Rename_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1178.038302] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370629, 'name': Rename_Task, 'duration_secs': 0.135295} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1178.038595] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1178.038848] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a283c539-7ed9-4233-b77d-7a05958294d0 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1178.045804] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){
[ 1178.045804] env[62923]: value = "task-1370630"
[ 1178.045804] env[62923]: _type = "Task"
[ 1178.045804] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1178.054279] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370630, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1178.556151] env[62923]: DEBUG oslo_vmware.api [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370630, 'name': PowerOnVM_Task, 'duration_secs': 0.461237} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1178.556151] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1178.656047] env[62923]: DEBUG nova.compute.manager [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1178.657058] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab46fa17-7abd-4d87-a6ae-272fb3b9ad7a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1179.174980] env[62923]: DEBUG oslo_concurrency.lockutils [None req-261acb63-2e5b-4c8c-ba70-a0c4a68680a0 tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.513s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1182.009560] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b943c8e1-68e0-4313-bde7-865ba05408b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1182.009922] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b943c8e1-68e0-4313-bde7-865ba05408b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1182.010079] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b943c8e1-68e0-4313-bde7-865ba05408b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1182.010256] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b943c8e1-68e0-4313-bde7-865ba05408b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1182.010422] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b943c8e1-68e0-4313-bde7-865ba05408b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1182.012862] env[62923]: INFO nova.compute.manager [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Terminating instance
[ 1182.014747] env[62923]: DEBUG nova.compute.manager [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 1182.014958] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1182.015252] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-251f9c8a-bf66-4666-8f9c-87acb8ca3c5f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1182.022123] env[62923]: DEBUG oslo_vmware.api [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1182.022123] env[62923]: value = "task-1370631"
[ 1182.022123] env[62923]: _type = "Task"
[ 1182.022123] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1182.030181] env[62923]: DEBUG oslo_vmware.api [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370631, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1182.532174] env[62923]: DEBUG oslo_vmware.api [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370631, 'name': PowerOffVM_Task, 'duration_secs': 0.167723} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1182.532454] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1182.532651] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Volume detach. Driver type: vmdk {{(pid=62923) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}}
[ 1182.532841] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291548', 'volume_id': '2bdd0528-e981-4226-a215-481044e8f3b2', 'name': 'volume-2bdd0528-e981-4226-a215-481044e8f3b2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'b943c8e1-68e0-4313-bde7-865ba05408b9', 'attached_at': '2024-10-29T12:12:14.000000', 'detached_at': '', 'volume_id': '2bdd0528-e981-4226-a215-481044e8f3b2', 'serial': '2bdd0528-e981-4226-a215-481044e8f3b2'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}}
[ 1182.533594] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03323f1f-4c8b-4e20-8cb1-064c1c2116c6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1182.551143] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b1928f-0492-40d3-acad-700bab523155 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1182.557335] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ac5bc3-3d3c-45f6-a4e1-33713c51aa62 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1182.574649] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d85e7b-f2b0-41bc-b45b-27406eb36a1d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1182.588186] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] The volume has not been displaced from its original location: [datastore2] volume-2bdd0528-e981-4226-a215-481044e8f3b2/volume-2bdd0528-e981-4226-a215-481044e8f3b2.vmdk. No consolidation needed. {{(pid=62923) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}}
[ 1182.593203] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}}
[ 1182.593462] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b708c170-aca3-4f0f-b378-a96fa92db511 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1182.610291] env[62923]: DEBUG oslo_vmware.api [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1182.610291] env[62923]: value = "task-1370632"
[ 1182.610291] env[62923]: _type = "Task"
[ 1182.610291] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1182.617681] env[62923]: DEBUG oslo_vmware.api [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370632, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1183.119961] env[62923]: DEBUG oslo_vmware.api [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370632, 'name': ReconfigVM_Task, 'duration_secs': 0.162436} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1183.120363] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=62923) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}}
[ 1183.124809] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e08c4a2-1197-409f-a694-0bd045279bca {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1183.139435] env[62923]: DEBUG oslo_vmware.api [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1183.139435] env[62923]: value = "task-1370633"
[ 1183.139435] env[62923]: _type = "Task"
[ 1183.139435] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1183.147135] env[62923]: DEBUG oslo_vmware.api [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370633, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1183.649022] env[62923]: DEBUG oslo_vmware.api [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370633, 'name': ReconfigVM_Task, 'duration_secs': 0.132546} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1183.649358] env[62923]: DEBUG nova.virt.vmwareapi.volumeops [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-291548', 'volume_id': '2bdd0528-e981-4226-a215-481044e8f3b2', 'name': 'volume-2bdd0528-e981-4226-a215-481044e8f3b2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'b943c8e1-68e0-4313-bde7-865ba05408b9', 'attached_at': '2024-10-29T12:12:14.000000', 'detached_at': '', 'volume_id': '2bdd0528-e981-4226-a215-481044e8f3b2', 'serial': '2bdd0528-e981-4226-a215-481044e8f3b2'} {{(pid=62923) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}}
[ 1183.649645] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1183.650465] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13ac479-0785-40ae-91a8-3d69fb5818e1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1183.656578] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1183.656804] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-261f7fab-064d-4360-972a-6fe859cbb0e4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1183.733277] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1183.733502] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1183.733690] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleting the datastore file [datastore2] b943c8e1-68e0-4313-bde7-865ba05408b9 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1183.733959] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d753e7e-7fd9-49f3-bc4d-914f607dd478 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1183.741603] env[62923]: DEBUG oslo_vmware.api [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1183.741603] env[62923]: value = "task-1370635"
[ 1183.741603] env[62923]: _type = "Task"
[ 1183.741603] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1183.749061] env[62923]: DEBUG oslo_vmware.api [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370635, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1184.253109] env[62923]: DEBUG oslo_vmware.api [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370635, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082562} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1184.253608] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1184.253608] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1184.253724] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1184.253862] env[62923]: INFO nova.compute.manager [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Took 2.24 seconds to destroy the instance on the hypervisor.
[ 1184.254126] env[62923]: DEBUG oslo.service.loopingcall [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1184.254330] env[62923]: DEBUG nova.compute.manager [-] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1184.254426] env[62923]: DEBUG nova.network.neutron [-] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1184.686102] env[62923]: DEBUG nova.compute.manager [req-439619a0-2d8d-4a93-8c34-c32e1330ea21 req-8fb3fc1b-5cbb-4300-acfe-a355883f5972 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Received event network-vif-deleted-9ee90698-8589-4858-8ef7-47e64099ac79 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1184.686102] env[62923]: INFO nova.compute.manager [req-439619a0-2d8d-4a93-8c34-c32e1330ea21 req-8fb3fc1b-5cbb-4300-acfe-a355883f5972 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Neutron deleted interface 9ee90698-8589-4858-8ef7-47e64099ac79; detaching it from the instance and deleting it from the info cache
[ 1184.686102] env[62923]: DEBUG nova.network.neutron [req-439619a0-2d8d-4a93-8c34-c32e1330ea21 req-8fb3fc1b-5cbb-4300-acfe-a355883f5972 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1185.164029] env[62923]: DEBUG nova.network.neutron [-] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1185.188409] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca628aa2-2b39-4ea6-8c65-84225ab9b339 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1185.197989] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa9b8bc-cf82-4f79-bb58-941b48f389c8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1185.227139] env[62923]: DEBUG nova.compute.manager [req-439619a0-2d8d-4a93-8c34-c32e1330ea21 req-8fb3fc1b-5cbb-4300-acfe-a355883f5972 service nova] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Detach interface failed, port_id=9ee90698-8589-4858-8ef7-47e64099ac79, reason: Instance b943c8e1-68e0-4313-bde7-865ba05408b9 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 1185.667264] env[62923]: INFO nova.compute.manager [-] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Took 1.41 seconds to deallocate network for instance.
[ 1186.211242] env[62923]: INFO nova.compute.manager [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Took 0.54 seconds to detach 1 volumes for instance.
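The MoveVirtualDisk_Task/CopyVirtualDisk_Task entries above all follow the same oslo_vmware wait_for_task shape: a task is submitted via the SOAP service, then _poll_task logs its progress until it completes or errors. A minimal sketch of that polling pattern, assuming a hypothetical get_task_info(task_ref) callable that returns an object with .state, .progress, and .error (this is not oslo_vmware's real API, just the loop the log implies):

import time

POLL_INTERVAL = 0.5  # roughly the cadence between the progress lines above


class TaskFailed(Exception):
    """Raised when the hypervisor reports the task in an error state."""


def wait_for_task(get_task_info, task_ref, poll_interval=POLL_INTERVAL):
    # get_task_info is an assumed helper; oslo_vmware wraps the vSphere
    # TaskInfo object instead of the plain namespace used here.
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            duration_secs = round(time.monotonic() - start, 6)
            print(f"Task: {{'id': {task_ref}, 'duration_secs': {duration_secs}}} "
                  "completed successfully.")
            return info
        if info.state == 'error':
            raise TaskFailed(info.error)
        print(f"Task: {{'id': {task_ref}}} progress is {info.progress}%.")
        time.sleep(poll_interval)

The per-task 'duration_secs' values in the log (e.g. 2.14605 for the move, 2.161456 for the copy) come from exactly this kind of start-to-success timing around the poll loop.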
[ 1186.213449] env[62923]: DEBUG nova.compute.manager [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b943c8e1-68e0-4313-bde7-865ba05408b9] Deleting volume: 2bdd0528-e981-4226-a215-481044e8f3b2 {{(pid=62923) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}}
[ 1186.750101] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1186.750490] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1186.750574] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1186.774567] env[62923]: INFO nova.scheduler.client.report [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleted allocations for instance b943c8e1-68e0-4313-bde7-865ba05408b9
[ 1187.283210] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c16ad72f-6be1-4907-a8e9-50fdb6b2bdf8 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b943c8e1-68e0-4313-bde7-865ba05408b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.272s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1187.563511] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b826c4d1-3e31-49da-8e16-8e512599912c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1187.563511] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b826c4d1-3e31-49da-8e16-8e512599912c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1187.563712] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "b826c4d1-3e31-49da-8e16-8e512599912c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1187.563897] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b826c4d1-3e31-49da-8e16-8e512599912c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1187.564085] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "b826c4d1-3e31-49da-8e16-8e512599912c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1187.566335] env[62923]: INFO nova.compute.manager [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Terminating instance
[ 1187.568087] env[62923]: DEBUG nova.compute.manager [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 1187.568287] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1187.569127] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c58f8c-8391-4ddd-a80b-16c09c89d5ad {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1187.576868] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1187.577107] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec43cb2f-fe7a-41ae-b18f-d91cd1d3bb46 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1187.583353] env[62923]: DEBUG oslo_vmware.api [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1187.583353] env[62923]: value = "task-1370637"
[ 1187.583353] env[62923]: _type = "Task"
[ 1187.583353] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1187.590352] env[62923]: DEBUG oslo_vmware.api [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370637, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1188.093395] env[62923]: DEBUG oslo_vmware.api [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370637, 'name': PowerOffVM_Task, 'duration_secs': 0.173134} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1188.093787] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1188.093835] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1188.094062] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd2adddf-43b0-42f7-94b3-c30adc977d6a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1188.156972] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1188.157264] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1188.157475] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleting the datastore file [datastore1] b826c4d1-3e31-49da-8e16-8e512599912c {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1188.157752] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-240fc69c-e69a-4cd0-8864-bb99d5f8b21e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1188.164351] env[62923]: DEBUG oslo_vmware.api [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){
[ 1188.164351] env[62923]: value = "task-1370639"
[ 1188.164351] env[62923]: _type = "Task"
[ 1188.164351] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1188.171726] env[62923]: DEBUG oslo_vmware.api [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370639, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1188.674261] env[62923]: DEBUG oslo_vmware.api [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370639, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12779} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1188.674529] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1188.674715] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1188.674895] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1188.675084] env[62923]: INFO nova.compute.manager [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Took 1.11 seconds to destroy the instance on the hypervisor.
[ 1188.675388] env[62923]: DEBUG oslo.service.loopingcall [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1188.675603] env[62923]: DEBUG nova.compute.manager [-] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1188.675714] env[62923]: DEBUG nova.network.neutron [-] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1188.900261] env[62923]: DEBUG nova.compute.manager [req-c0898f16-b915-40ef-a8d6-5b2eb14cd877 req-b32f0e10-6452-4a11-97f7-6eb6ff5240ed service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Received event network-vif-deleted-3b76277b-efa1-43eb-908f-60a7e2a9f7ad {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1188.900465] env[62923]: INFO nova.compute.manager [req-c0898f16-b915-40ef-a8d6-5b2eb14cd877 req-b32f0e10-6452-4a11-97f7-6eb6ff5240ed service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Neutron deleted interface 3b76277b-efa1-43eb-908f-60a7e2a9f7ad; detaching it from the instance and deleting it from the info cache
[ 1188.900638] env[62923]: DEBUG nova.network.neutron [req-c0898f16-b915-40ef-a8d6-5b2eb14cd877 req-b32f0e10-6452-4a11-97f7-6eb6ff5240ed service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1189.379577] env[62923]: DEBUG nova.network.neutron [-] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1189.404548] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2a70ed0-92f4-415b-acc1-143f295f4888 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1189.413501] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83fbc08d-2d5f-473c-8078-0f42d664b93c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1189.436770] env[62923]: DEBUG nova.compute.manager [req-c0898f16-b915-40ef-a8d6-5b2eb14cd877 req-b32f0e10-6452-4a11-97f7-6eb6ff5240ed service nova] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Detach interface failed, port_id=3b76277b-efa1-43eb-908f-60a7e2a9f7ad, reason: Instance b826c4d1-3e31-49da-8e16-8e512599912c could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 1189.882441] env[62923]: INFO nova.compute.manager [-] [instance: b826c4d1-3e31-49da-8e16-8e512599912c] Took 1.21 seconds to deallocate network for instance.
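Both terminate flows above are bracketed by oslo.concurrency lockutils messages that report how long each named lock ("compute_resources", an instance UUID, the UUID-events lock) was waited for and held. A rough process-local sketch of that bookkeeping, using a plain threading.Lock rather than oslo.concurrency itself (which also offers fair and inter-process lock modes):

import threading
import time
from contextlib import contextmanager

_locks: dict = {}
_registry_guard = threading.Lock()


@contextmanager
def timed_lock(name: str, by: str):
    # One lock object per lock name, mirroring the named locks in the log.
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{by}" :: waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{by}" :: held {time.monotonic() - t1:.3f}s')


# e.g. with timed_lock("compute_resources", "ResourceTracker.update_usage"): ...

The "held 5.272s" line for the first terminate is the whole do_terminate_instance critical section measured this way: power-off, volume detach, unregister, datastore cleanup, and network deallocation all happen while the instance-UUID lock is held.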
[ 1190.031901] env[62923]: DEBUG oslo_concurrency.lockutils [None req-977b3cf9-8352-47b5-9403-2b4c42121b19 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "ecbf08d7-e908-4496-8820-b0239bb051b1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1190.032172] env[62923]: DEBUG oslo_concurrency.lockutils [None req-977b3cf9-8352-47b5-9403-2b4c42121b19 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "ecbf08d7-e908-4496-8820-b0239bb051b1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1190.032416] env[62923]: DEBUG nova.compute.manager [None req-977b3cf9-8352-47b5-9403-2b4c42121b19 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1190.033341] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2c20a9-1164-46ac-95e6-c307a7696fba {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1190.040326] env[62923]: DEBUG nova.compute.manager [None req-977b3cf9-8352-47b5-9403-2b4c42121b19 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62923) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}}
[ 1190.040915] env[62923]: DEBUG nova.objects.instance [None req-977b3cf9-8352-47b5-9403-2b4c42121b19 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lazy-loading 'flavor' on Instance uuid ecbf08d7-e908-4496-8820-b0239bb051b1 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1190.388817] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1190.389125] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1190.389358] env[62923]: DEBUG nova.objects.instance [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lazy-loading 'resources' on Instance uuid b826c4d1-3e31-49da-8e16-8e512599912c {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1190.546347] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-977b3cf9-8352-47b5-9403-2b4c42121b19 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1190.546604] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6606a77b-351b-4c1c-9c13-7f9a8a00ccf8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1190.553723] env[62923]: DEBUG oslo_vmware.api [None req-977b3cf9-8352-47b5-9403-2b4c42121b19 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){
[ 1190.553723] env[62923]: value = "task-1370640"
[ 1190.553723] env[62923]: _type = "Task"
[ 1190.553723] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1190.562726] env[62923]: DEBUG oslo_vmware.api [None req-977b3cf9-8352-47b5-9403-2b4c42121b19 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370640, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1190.952490] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cca304-9d4d-4cf1-9de8-635623513908 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1190.959769] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794d2357-441b-4d37-8aca-b913a7d822bb {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1190.989475] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e028768-1126-4768-8b67-0c83fa67bf6c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1190.995908] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6383635a-6120-429c-8a13-f75975365795 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1191.008172] env[62923]: DEBUG nova.compute.provider_tree [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1191.061889] env[62923]: DEBUG oslo_vmware.api [None req-977b3cf9-8352-47b5-9403-2b4c42121b19 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370640, 'name': PowerOffVM_Task, 'duration_secs': 0.202043} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1191.062156] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-977b3cf9-8352-47b5-9403-2b4c42121b19 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1191.062339] env[62923]: DEBUG nova.compute.manager [None req-977b3cf9-8352-47b5-9403-2b4c42121b19 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 1191.063043] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d2487a-a4b5-4948-b569-cd2d3c14e2f6 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1191.511630] env[62923]: DEBUG nova.scheduler.client.report [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1191.573234] env[62923]: DEBUG oslo_concurrency.lockutils [None req-977b3cf9-8352-47b5-9403-2b4c42121b19 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "ecbf08d7-e908-4496-8820-b0239bb051b1" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.541s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1192.017223] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.628s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1192.038302] env[62923]: INFO nova.scheduler.client.report [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleted allocations for instance b826c4d1-3e31-49da-8e16-8e512599912c
[ 1192.397316] env[62923]: DEBUG nova.objects.instance [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lazy-loading 'flavor' on Instance uuid ecbf08d7-e908-4496-8820-b0239bb051b1 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1192.545159] env[62923]: DEBUG oslo_concurrency.lockutils [None req-5d2a668e-3294-4879-bc9b-d1b9f87a8905 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock
"b826c4d1-3e31-49da-8e16-8e512599912c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.981s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.903134] env[62923]: DEBUG oslo_concurrency.lockutils [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1192.903312] env[62923]: DEBUG oslo_concurrency.lockutils [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.903485] env[62923]: DEBUG nova.network.neutron [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1192.903665] env[62923]: DEBUG nova.objects.instance [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lazy-loading 'info_cache' on Instance uuid ecbf08d7-e908-4496-8820-b0239bb051b1 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1193.407496] env[62923]: DEBUG nova.objects.base [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62923) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1194.122239] env[62923]: DEBUG nova.network.neutron [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Updating instance_info_cache with network_info: [{"id": "2bb3f758-42d1-4dc8-82ba-849b12e76fa4", "address": "fa:16:3e:0b:bc:61", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb3f758-42", "ovs_interfaceid": "2bb3f758-42d1-4dc8-82ba-849b12e76fa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.625382] env[62923]: DEBUG oslo_concurrency.lockutils [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1195.128811] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Powering on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1195.129216] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3820978e-f8c7-47cd-9e63-5ef36f389dae {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.137308] env[62923]: DEBUG oslo_vmware.api [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1195.137308] env[62923]: value = "task-1370642" [ 1195.137308] env[62923]: _type = "Task" [ 1195.137308] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.144788] env[62923]: DEBUG oslo_vmware.api [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370642, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.647267] env[62923]: DEBUG oslo_vmware.api [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370642, 'name': PowerOnVM_Task, 'duration_secs': 0.386826} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.647555] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Powered on the VM {{(pid=62923) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1195.647747] env[62923]: DEBUG nova.compute.manager [None req-07fbcad8-ec9d-4a2b-9167-6c8f8290b1aa tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1195.648612] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1098e3bc-bae7-4a98-b8b6-ee8a72f2bfb4 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.167883] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "534fa654-ed73-4518-bdc7-d1f981628fd8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.168288] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "534fa654-ed73-4518-bdc7-d1f981628fd8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.168343] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "534fa654-ed73-4518-bdc7-d1f981628fd8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.168494] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "534fa654-ed73-4518-bdc7-d1f981628fd8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.168663] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "534fa654-ed73-4518-bdc7-d1f981628fd8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.170330] env[62923]: INFO nova.compute.manager [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 
tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Terminating instance [ 1196.172201] env[62923]: DEBUG nova.compute.manager [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1196.172393] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1196.173218] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d66376-3195-4b3a-b95f-3260b6eee4de {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.181382] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1196.181605] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-660822b5-f090-43b9-a139-2c782081e029 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.188196] env[62923]: DEBUG oslo_vmware.api [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1196.188196] env[62923]: value = "task-1370643" [ 1196.188196] env[62923]: _type = "Task" [ 1196.188196] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.197603] env[62923]: DEBUG oslo_vmware.api [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370643, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.698231] env[62923]: DEBUG oslo_vmware.api [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370643, 'name': PowerOffVM_Task, 'duration_secs': 0.183273} completed successfully. 
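
The terminate sequence above uses the same two-level locking visible throughout this log: ComputeManager serializes on the instance UUID for the whole operation, and briefly takes a second "<uuid>-events" lock to clear pending external events before destroying the VM. oslo_concurrency's lock context manager, the library doing the "acquired by" / "released by" logging here, supports the pattern directly; the event store below is a made-up stand-in, not Nova's InstanceEvents:

from oslo_concurrency import lockutils

_pending_events = {}  # illustrative store: instance uuid -> queued event names

def clear_events_for_instance(uuid):
    # Matches the short-lived "<uuid>-events" lock (held 0.000s) above.
    with lockutils.lock(uuid + "-events"):
        return _pending_events.pop(uuid, [])

def do_terminate_instance(uuid, destroy_fn):
    # Matches the long-held per-instance lock ("held 5.248s" style lines).
    with lockutils.lock(uuid):
        clear_events_for_instance(uuid)
        destroy_fn()
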
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.698500] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1196.698673] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1196.698929] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-556b1f99-e224-4799-846f-5e523327d29e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.759256] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1196.759536] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1196.759735] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleting the datastore file [datastore1] 534fa654-ed73-4518-bdc7-d1f981628fd8 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1196.760018] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1cc3d2f0-5c98-4251-8b15-e15761333398 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.766250] env[62923]: DEBUG oslo_vmware.api [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for the task: (returnval){ [ 1196.766250] env[62923]: value = "task-1370645" [ 1196.766250] env[62923]: _type = "Task" [ 1196.766250] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.773880] env[62923]: DEBUG oslo_vmware.api [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370645, 'name': DeleteDatastoreFile_Task} progress is 0%. 
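
The destroy order vmops logs above is: PowerOffVM_Task, then UnregisterVM, then a FileManager.DeleteDatastoreFile_Task on the instance directory, which ds_util names in the bracketed "[datastore1] <uuid>" form. A helper for splitting that datastore-path notation (the notation is visible in the log; the function itself is only a sketch):

def split_datastore_path(path):
    # "[datastore1] 534fa654-ed73-4518-bdc7-d1f981628fd8"
    #   -> ("datastore1", "534fa654-ed73-4518-bdc7-d1f981628fd8")
    if not path.startswith("["):
        raise ValueError("expected '[datastore] relative/path' notation")
    ds, _, rel = path.partition("]")
    return ds.lstrip("[").strip(), rel.strip()
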
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.940268] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1196.940450] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Starting heal instance info cache {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1196.940567] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Rebuilding the list of instances to heal {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1197.276557] env[62923]: DEBUG oslo_vmware.api [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Task: {'id': task-1370645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128459} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.276916] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1197.277013] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1197.277200] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1197.277408] env[62923]: INFO nova.compute.manager [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1197.277658] env[62923]: DEBUG oslo.service.loopingcall [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1197.277863] env[62923]: DEBUG nova.compute.manager [-] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1197.277958] env[62923]: DEBUG nova.network.neutron [-] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1197.337332] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f1a6df-9671-4f59-8fd1-b32d698e57fc {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.344436] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cca7b9ea-42b2-4185-8960-1e063558ec99 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Suspending the VM {{(pid=62923) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1197.344670] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-b7e5eac4-6e9c-40ae-876d-4d1bfde2bff9 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.351610] env[62923]: DEBUG oslo_vmware.api [None req-cca7b9ea-42b2-4185-8960-1e063558ec99 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1197.351610] env[62923]: value = "task-1370646" [ 1197.351610] env[62923]: _type = "Task" [ 1197.351610] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.359195] env[62923]: DEBUG oslo_vmware.api [None req-cca7b9ea-42b2-4185-8960-1e063558ec99 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370646, 'name': SuspendVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.443852] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Skipping network cache update for instance because it is being deleted. 
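
_heal_instance_info_cache and the other ComputeManager jobs interleaved through this log are oslo.service periodic tasks: methods decorated on a PeriodicTasks subclass and driven by run_periodic_tasks from the service's timer loop. A minimal sketch of that machinery (the spacing value and the task body are invented for illustration):

from oslo_config import cfg
from oslo_service import periodic_task

class Manager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(cfg.CONF)

    @periodic_task.periodic_task(spacing=60)
    def _heal_instance_info_cache(self, context):
        # Refresh one instance's cache per tick, skipping instances being
        # deleted, as the "Skipping network cache update" line above shows.
        pass

mgr = Manager()
mgr.run_periodic_tasks(None)  # normally invoked by the service's timer loop
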
{{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1197.477305] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.477471] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquired lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.477621] env[62923]: DEBUG nova.network.neutron [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Forcefully refreshing network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1197.477780] env[62923]: DEBUG nova.objects.instance [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lazy-loading 'info_cache' on Instance uuid d4bc3c6c-20ac-4714-8109-867a2f6292b1 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.755494] env[62923]: DEBUG nova.compute.manager [req-d327091c-e0a9-4362-9f09-caa248bea7c9 req-ee8d5c6b-95d7-40e1-8563-09381e2f39b2 service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Received event network-vif-deleted-e5f5c80e-b51d-4788-a346-d4ff5982fa57 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1197.755742] env[62923]: INFO nova.compute.manager [req-d327091c-e0a9-4362-9f09-caa248bea7c9 req-ee8d5c6b-95d7-40e1-8563-09381e2f39b2 service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Neutron deleted interface e5f5c80e-b51d-4788-a346-d4ff5982fa57; detaching it from the instance and deleting it from the info cache [ 1197.755893] env[62923]: DEBUG nova.network.neutron [req-d327091c-e0a9-4362-9f09-caa248bea7c9 req-ee8d5c6b-95d7-40e1-8563-09381e2f39b2 service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.862311] env[62923]: DEBUG oslo_vmware.api [None req-cca7b9ea-42b2-4185-8960-1e063558ec99 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370646, 'name': SuspendVM_Task} progress is 66%. 
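
The network-vif-deleted handling above is event-driven cleanup: Neutron deletes the port, Nova receives an external instance event and strips that interface from the cached network_info, ending in the empty "[]" cache update, with a harmless "could not be found" when the instance is already gone. A plain-dict sketch of that cache edit (not Nova's actual method):

def drop_deleted_vif(network_info, port_id):
    # network_info: list of VIF dicts as in the cache blobs logged above.
    kept = [vif for vif in network_info if vif["id"] != port_id]
    return kept, len(kept) != len(network_info)  # (new cache, was_removed)
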
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.231484] env[62923]: DEBUG nova.network.neutron [-] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.259271] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33f5696c-4fc9-4706-89ef-fedc7dc7db19 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.269294] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742f18e6-d806-475f-83ea-ef453519468c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.291768] env[62923]: DEBUG nova.compute.manager [req-d327091c-e0a9-4362-9f09-caa248bea7c9 req-ee8d5c6b-95d7-40e1-8563-09381e2f39b2 service nova] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Detach interface failed, port_id=e5f5c80e-b51d-4788-a346-d4ff5982fa57, reason: Instance 534fa654-ed73-4518-bdc7-d1f981628fd8 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1198.361813] env[62923]: DEBUG oslo_vmware.api [None req-cca7b9ea-42b2-4185-8960-1e063558ec99 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370646, 'name': SuspendVM_Task, 'duration_secs': 0.582232} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.362087] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-cca7b9ea-42b2-4185-8960-1e063558ec99 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Suspended the VM {{(pid=62923) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1198.362272] env[62923]: DEBUG nova.compute.manager [None req-cca7b9ea-42b2-4185-8960-1e063558ec99 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1198.363013] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f45ff653-29b5-4c1d-a032-6106cdf2c193 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.734398] env[62923]: INFO nova.compute.manager [-] [instance: 534fa654-ed73-4518-bdc7-d1f981628fd8] Took 1.46 seconds to deallocate network for instance. 
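
"Waiting for function ..._deallocate_network_with_retries to return" is oslo.service's looping-call pattern: the deallocation is wrapped in a function that is re-invoked on an interval until it signals completion by raising LoopingCallDone. A compact example with a fake transient failure (the retry count and interval are invented; the loopingcall API itself is the real one named in the log):

from oslo_service import loopingcall

def deallocate_with_retries(deallocate, max_attempts=3):
    attempts = {"n": 0}

    def _try():
        attempts["n"] += 1
        try:
            deallocate()
        except Exception:
            if attempts["n"] >= max_attempts:
                raise
            return  # transient failure: let the loop call us again
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_try)
    return timer.start(interval=1).wait()
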
[ 1199.200884] env[62923]: DEBUG nova.network.neutron [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updating instance_info_cache with network_info: [{"id": "9add9dea-2408-497b-982c-a558a1db59bc", "address": "fa:16:3e:22:3a:73", "network": {"id": "1b68221f-8387-41e3-8ba2-362a35a21f3a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-51541053-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "418b805157a74173b5cfe13ea5b61c13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9add9dea-24", "ovs_interfaceid": "9add9dea-2408-497b-982c-a558a1db59bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.241081] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1199.241318] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1199.241544] env[62923]: DEBUG nova.objects.instance [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lazy-loading 'resources' on Instance uuid 534fa654-ed73-4518-bdc7-d1f981628fd8 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.676775] env[62923]: INFO nova.compute.manager [None req-44bb9f95-b984-4530-b7d6-20b98b5c206b tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Resuming [ 1199.677453] env[62923]: DEBUG nova.objects.instance [None req-44bb9f95-b984-4530-b7d6-20b98b5c206b tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lazy-loading 'flavor' on Instance uuid ecbf08d7-e908-4496-8820-b0239bb051b1 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.702708] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Releasing lock "refresh_cache-d4bc3c6c-20ac-4714-8109-867a2f6292b1" 
{{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1199.702885] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updated the network info_cache for instance {{(pid=62923) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1199.826756] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5f23bb-b8c6-4294-8283-97ecbc717f77 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.834547] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530b267b-64b9-4ef6-a6f4-389ecc965873 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.864021] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6cc20f8-ff18-4e85-bb31-c479f71d8421 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.870718] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df859077-d26c-4ec5-8f3c-6d369054780e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.883191] env[62923]: DEBUG nova.compute.provider_tree [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1200.386576] env[62923]: DEBUG nova.scheduler.client.report [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1200.684948] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44bb9f95-b984-4530-b7d6-20b98b5c206b tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.685359] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44bb9f95-b984-4530-b7d6-20b98b5c206b tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquired lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.685359] env[62923]: DEBUG nova.network.neutron [None req-44bb9f95-b984-4530-b7d6-20b98b5c206b tempest-ServerActionsTestJSON-1144686189 
tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Building network info cache for instance {{(pid=62923) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1200.891117] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.650s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.908962] env[62923]: INFO nova.scheduler.client.report [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Deleted allocations for instance 534fa654-ed73-4518-bdc7-d1f981628fd8 [ 1201.416670] env[62923]: DEBUG oslo_concurrency.lockutils [None req-d961c3cc-2fcb-4cf4-87d7-a771d71a8d23 tempest-ServerActionsTestOtherA-777156528 tempest-ServerActionsTestOtherA-777156528-project-member] Lock "534fa654-ed73-4518-bdc7-d1f981628fd8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.248s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.420940] env[62923]: DEBUG nova.network.neutron [None req-44bb9f95-b984-4530-b7d6-20b98b5c206b tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Updating instance_info_cache with network_info: [{"id": "2bb3f758-42d1-4dc8-82ba-849b12e76fa4", "address": "fa:16:3e:0b:bc:61", "network": {"id": "b64fd0ff-c4e0-4925-977d-b35d47a86b41", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-2036370647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "011a5ec25af44f92961be00f82c10c08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4c7a041-8e34-47f9-8ea1-d2f29414fd9d", "external-id": "nsx-vlan-transportzone-553", "segmentation_id": 553, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2bb3f758-42", "ovs_interfaceid": "2bb3f758-42d1-4dc8-82ba-849b12e76fa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.924786] env[62923]: DEBUG oslo_concurrency.lockutils [None req-44bb9f95-b984-4530-b7d6-20b98b5c206b tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Releasing lock "refresh_cache-ecbf08d7-e908-4496-8820-b0239bb051b1" {{(pid=62923) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.925102] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7b564e4d-7c1b-45d1-b54a-0e33ccd41865 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.932882] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-44bb9f95-b984-4530-b7d6-20b98b5c206b tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Resuming the VM {{(pid=62923) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1201.932882] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-636912c0-9cf5-48a7-9ec4-fd433be7e359 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.940214] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.940379] env[62923]: DEBUG nova.compute.manager [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62923) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1201.940681] env[62923]: DEBUG oslo_vmware.api [None req-44bb9f95-b984-4530-b7d6-20b98b5c206b tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1201.940681] env[62923]: value = "task-1370647" [ 1201.940681] env[62923]: _type = "Task" [ 1201.940681] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.951809] env[62923]: DEBUG oslo_vmware.api [None req-44bb9f95-b984-4530-b7d6-20b98b5c206b tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370647, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.450909] env[62923]: DEBUG oslo_vmware.api [None req-44bb9f95-b984-4530-b7d6-20b98b5c206b tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370647, 'name': PowerOnVM_Task, 'duration_secs': 0.509718} completed successfully. 
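
Each power transition above (power off, suspend, resume) is followed by a "Checking state" PropertyCollector read: the driver fetches the VM's runtime power state from vCenter and maps it onto Nova's integer power_state values, which is why the earlier stop logged "current VM power_state: 1". A sketch of that mapping (the integer constants come from nova.compute.power_state; the table here is illustrative, not the driver's actual one):

# From nova.compute.power_state: RUNNING=1, SHUTDOWN=4, SUSPENDED=7, NOSTATE=0.
_VMWARE_POWER_STATES = {
    "poweredOn": 1,    # RUNNING
    "poweredOff": 4,   # SHUTDOWN
    "suspended": 7,    # SUSPENDED
}

def get_power_state(runtime_power_state):
    # runtime_power_state: VirtualMachine.runtime.powerState, as read by the
    # PropertyCollector.RetrievePropertiesEx calls that follow each task.
    return _VMWARE_POWER_STATES.get(runtime_power_state, 0)  # 0 = NOSTATE
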
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.451307] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-44bb9f95-b984-4530-b7d6-20b98b5c206b tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Resumed the VM {{(pid=62923) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1202.451421] env[62923]: DEBUG nova.compute.manager [None req-44bb9f95-b984-4530-b7d6-20b98b5c206b tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Checking state {{(pid=62923) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1202.452213] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a934577-5787-40f4-ba08-d7c61701e8ec {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.940617] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.313389] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "ecbf08d7-e908-4496-8820-b0239bb051b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.313609] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "ecbf08d7-e908-4496-8820-b0239bb051b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.313822] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "ecbf08d7-e908-4496-8820-b0239bb051b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.314038] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "ecbf08d7-e908-4496-8820-b0239bb051b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.314218] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "ecbf08d7-e908-4496-8820-b0239bb051b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
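
Every lock record in this log carries "waited N s" / "held N s" timings, which lockutils measures around acquisition and release. The same instrumentation can be reproduced with a stdlib context manager (the output format is copied from the log; everything else is a sketch):

import threading
import time
from contextlib import contextmanager

@contextmanager
def timed_lock(lock, name, by):
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{by}" :: '
          f'waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{by}" :: '
              f'held {time.monotonic() - t1:.3f}s')

guard = threading.Lock()
with timed_lock(guard, "compute_resources", "update_usage"):
    pass
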
{{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.316422] env[62923]: INFO nova.compute.manager [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Terminating instance [ 1203.318236] env[62923]: DEBUG nova.compute.manager [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1203.318589] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1203.319458] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b6ac55c-47cb-4912-9588-967abffdac58 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.326932] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1203.327181] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ffdca3e3-3e6d-46b8-a00b-fcf1c75f8fbf {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.333497] env[62923]: DEBUG oslo_vmware.api [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1203.333497] env[62923]: value = "task-1370648" [ 1203.333497] env[62923]: _type = "Task" [ 1203.333497] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.341009] env[62923]: DEBUG oslo_vmware.api [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370648, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.844065] env[62923]: DEBUG oslo_vmware.api [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370648, 'name': PowerOffVM_Task, 'duration_secs': 0.180201} completed successfully. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.844360] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1203.844535] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1203.844784] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c18c0cf-129f-4b5a-b13e-a858eea9f935 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.935590] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.938069] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1203.938300] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Deleting contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1203.938494] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleting the datastore file [datastore2] ecbf08d7-e908-4496-8820-b0239bb051b1 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1203.938997] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5d3c52c-a117-488e-9c81-43d5087ceb4c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.945416] env[62923]: DEBUG oslo_vmware.api [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for the task: (returnval){ [ 1203.945416] env[62923]: value = "task-1370650" [ 1203.945416] env[62923]: _type = "Task" [ 1203.945416] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.954698] env[62923]: DEBUG oslo_vmware.api [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370650, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.443060] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1204.443060] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1204.443060] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1204.455153] env[62923]: DEBUG oslo_vmware.api [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Task: {'id': task-1370650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.407761} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.456125] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1204.456477] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Deleted contents of the VM from datastore datastore2 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1204.456792] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1204.457093] env[62923]: INFO nova.compute.manager [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1204.457439] env[62923]: DEBUG oslo.service.loopingcall [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1204.457963] env[62923]: DEBUG nova.compute.manager [-] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1204.458167] env[62923]: DEBUG nova.network.neutron [-] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1204.890833] env[62923]: DEBUG nova.compute.manager [req-b26da4a3-901e-4bc0-8b6d-e947b2f70635 req-c34db307-8a6c-42f7-b13a-08ecf03389cf service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Received event network-vif-deleted-2bb3f758-42d1-4dc8-82ba-849b12e76fa4 {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1204.891046] env[62923]: INFO nova.compute.manager [req-b26da4a3-901e-4bc0-8b6d-e947b2f70635 req-c34db307-8a6c-42f7-b13a-08ecf03389cf service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Neutron deleted interface 2bb3f758-42d1-4dc8-82ba-849b12e76fa4; detaching it from the instance and deleting it from the info cache [ 1204.891219] env[62923]: DEBUG nova.network.neutron [req-b26da4a3-901e-4bc0-8b6d-e947b2f70635 req-c34db307-8a6c-42f7-b13a-08ecf03389cf service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.945125] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.945392] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.945608] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.945842] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62923) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1204.946650] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9c9f05-f4e6-4d56-965f-2470f888e457 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.954764] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd210d4-6086-4284-9a5c-0e2d0741b733 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.968162] env[62923]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eec887a-af44-4624-8c1a-69d3bce23d9a {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.974524] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bd5a65-1bc8-48c3-a065-f0722188f3ee {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.005064] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180365MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62923) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1205.005064] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.005064] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.370530] env[62923]: DEBUG nova.network.neutron [-] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.393507] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-31d19563-65ff-44a7-b194-611ff65886a8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.403470] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c056430-aca4-4847-99d3-a684fcec1357 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.426853] env[62923]: DEBUG nova.compute.manager [req-b26da4a3-901e-4bc0-8b6d-e947b2f70635 req-c34db307-8a6c-42f7-b13a-08ecf03389cf service nova] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Detach interface failed, port_id=2bb3f758-42d1-4dc8-82ba-849b12e76fa4, reason: Instance ecbf08d7-e908-4496-8820-b0239bb051b1 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1205.873570] env[62923]: INFO nova.compute.manager [-] [instance: ecbf08d7-e908-4496-8820-b0239bb051b1] Took 1.42 seconds to deallocate network for instance. [ 1206.028462] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance ecbf08d7-e908-4496-8820-b0239bb051b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.028727] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Instance d4bc3c6c-20ac-4714-8109-867a2f6292b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62923) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.028783] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1206.028931] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62923) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1206.063151] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37121394-74cd-4108-9296-f687b818f9a3 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.071038] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad2a3fc-055e-4142-b482-bab38beeb580 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.100237] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de86663-cf95-4100-85df-bd9274bce065 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.107204] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5f503c-5ea2-4d7b-a98e-e50a1e5f04e1 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.120029] env[62923]: DEBUG nova.compute.provider_tree [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1206.380614] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.623665] env[62923]: DEBUG nova.scheduler.client.report [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1207.128305] env[62923]: DEBUG nova.compute.resource_tracker [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62923) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1207.128686] env[62923]: DEBUG oslo_concurrency.lockutils [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.124s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.128783] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.748s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.128987] env[62923]: DEBUG nova.objects.instance [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lazy-loading 'resources' on Instance uuid ecbf08d7-e908-4496-8820-b0239bb051b1 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1207.627359] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.627613] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.627781] env[62923]: DEBUG oslo_service.periodic_task [None req-c968e504-0189-4bd5-87e0-c6e6c4031282 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62923) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.668501] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48940c1d-8b7c-4e44-8b94-a7b7fef67dce {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.677482] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f99cdb94-4b67-48fc-b801-e8626acd9ddd {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.705876] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55095479-393f-4025-b553-4c98d80f805e {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.712665] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e9e303-8b10-46a8-b8f0-9b55b8c7054f {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.725041] env[62923]: 
DEBUG nova.compute.provider_tree [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1208.228504] env[62923]: DEBUG nova.scheduler.client.report [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1208.734249] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.605s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.755532] env[62923]: INFO nova.scheduler.client.report [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Deleted allocations for instance ecbf08d7-e908-4496-8820-b0239bb051b1 [ 1209.263515] env[62923]: DEBUG oslo_concurrency.lockutils [None req-ad196d5d-66e0-4f36-be1a-6275a08d3093 tempest-ServerActionsTestJSON-1144686189 tempest-ServerActionsTestJSON-1144686189-project-member] Lock "ecbf08d7-e908-4496-8820-b0239bb051b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.950s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.356052] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.356052] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.356614] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.356614] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.356614] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.359204] env[62923]: INFO nova.compute.manager [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Terminating instance [ 1215.361019] env[62923]: DEBUG nova.compute.manager [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Start destroying the instance on the hypervisor. {{(pid=62923) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1215.361289] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Destroying instance {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1215.362208] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009d52dc-54b0-4027-9562-4afea511a132 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.370100] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Powering off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1215.370321] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50e3063b-82cc-4661-8dd4-1edab5a8d76d {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.377121] env[62923]: DEBUG oslo_vmware.api [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1215.377121] env[62923]: value = "task-1370651" [ 1215.377121] env[62923]: _type = "Task" [ 1215.377121] env[62923]: } to complete. 
{{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.384984] env[62923]: DEBUG oslo_vmware.api [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370651, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.886424] env[62923]: DEBUG oslo_vmware.api [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370651, 'name': PowerOffVM_Task, 'duration_secs': 0.209742} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.886696] env[62923]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Powered off the VM {{(pid=62923) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1215.886866] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Unregistering the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1215.887135] env[62923]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-90905a59-1eb4-4813-8641-baebf3556a12 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.024199] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Unregistered the VM {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1217.024587] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Deleting contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1217.024628] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleting the datastore file [datastore1] d4bc3c6c-20ac-4714-8109-867a2f6292b1 {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1217.024887] env[62923]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-563163e5-ba5f-45c7-98e0-a7edb2597840 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.031656] env[62923]: DEBUG oslo_vmware.api [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for the task: (returnval){ [ 1217.031656] env[62923]: value = 
"task-1370653" [ 1217.031656] env[62923]: _type = "Task" [ 1217.031656] env[62923]: } to complete. {{(pid=62923) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.040165] env[62923]: DEBUG oslo_vmware.api [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.541012] env[62923]: DEBUG oslo_vmware.api [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Task: {'id': task-1370653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136966} completed successfully. {{(pid=62923) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.541229] env[62923]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleted the datastore file {{(pid=62923) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1217.541408] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Deleted contents of the VM from datastore datastore1 {{(pid=62923) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1217.541585] env[62923]: DEBUG nova.virt.vmwareapi.vmops [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Instance destroyed {{(pid=62923) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1217.541758] env[62923]: INFO nova.compute.manager [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Took 2.18 seconds to destroy the instance on the hypervisor. [ 1217.541991] env[62923]: DEBUG oslo.service.loopingcall [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62923) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1217.542204] env[62923]: DEBUG nova.compute.manager [-] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Deallocating network for instance {{(pid=62923) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1217.542295] env[62923]: DEBUG nova.network.neutron [-] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] deallocate_for_instance() {{(pid=62923) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1217.975223] env[62923]: DEBUG nova.compute.manager [req-99e8154b-9761-4ca0-bfc0-bc54504ac4c6 req-f76c134f-966b-49b4-af37-4885439af36a service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Received event network-vif-deleted-9add9dea-2408-497b-982c-a558a1db59bc {{(pid=62923) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1217.975223] env[62923]: INFO nova.compute.manager [req-99e8154b-9761-4ca0-bfc0-bc54504ac4c6 req-f76c134f-966b-49b4-af37-4885439af36a service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Neutron deleted interface 9add9dea-2408-497b-982c-a558a1db59bc; detaching it from the instance and deleting it from the info cache [ 1217.975223] env[62923]: DEBUG nova.network.neutron [req-99e8154b-9761-4ca0-bfc0-bc54504ac4c6 req-f76c134f-966b-49b4-af37-4885439af36a service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.447745] env[62923]: DEBUG nova.network.neutron [-] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Updating instance_info_cache with network_info: [] {{(pid=62923) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.477165] env[62923]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd615a2c-d0a2-442d-8819-0a6535773a50 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.487510] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb32495-475c-452f-9a69-f2bc441b771c {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.508711] env[62923]: DEBUG nova.compute.manager [req-99e8154b-9761-4ca0-bfc0-bc54504ac4c6 req-f76c134f-966b-49b4-af37-4885439af36a service nova] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Detach interface failed, port_id=9add9dea-2408-497b-982c-a558a1db59bc, reason: Instance d4bc3c6c-20ac-4714-8109-867a2f6292b1 could not be found. {{(pid=62923) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1218.951447] env[62923]: INFO nova.compute.manager [-] [instance: d4bc3c6c-20ac-4714-8109-867a2f6292b1] Took 1.41 seconds to deallocate network for instance. 
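[editor's note] The records above trace the hypervisor-side teardown pattern that repeats for each instance in this log: invoke a vSphere task (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task), then poll it via wait_for_task until the "progress is 0%" lines give way to "completed successfully". Below is a minimal sketch of that invoke/wait loop using oslo.vmware directly, not Nova's own code; the vCenter host and credentials are placeholders, and the instance UUID is reused from the log purely for illustration.

```python
# Sketch of the oslo.vmware invoke/wait pattern seen in the log.
# Host and credentials are placeholders; do not treat this as the
# exact Nova call path (Nova wraps these calls in vm_util/vmops).
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc1.example.test',          # placeholder vCenter host
    'administrator',             # placeholder username
    'secret',                    # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)      # interval behind the _poll_task lines

# Find the VM by instance UUID, as SearchIndex.FindAllByUuid does above.
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid',
    session.vim.service_content.searchIndex,
    uuid='d4bc3c6c-20ac-4714-8109-867a2f6292b1',
    vmSearch=True, instanceUuid=True)

# Kick off an asynchronous vSphere task, then block until it finishes;
# wait_for_task raises on error and returns the task info on success.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_refs[0])
session.wait_for_task(task)
```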
[ 1219.458059] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.458364] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.458542] env[62923]: DEBUG nova.objects.instance [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lazy-loading 'resources' on Instance uuid d4bc3c6c-20ac-4714-8109-867a2f6292b1 {{(pid=62923) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1219.992302] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd553efc-6b62-40a3-83cd-24ffbc454881 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.999760] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c07e4a58-01f4-4260-acda-d7387c6d5359 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.028321] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066c93b0-ed2b-4e2d-bd14-07dd41577d98 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.034919] env[62923]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90834d16-491e-40c3-9970-d1efaae4a7f8 {{(pid=62923) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.047714] env[62923]: DEBUG nova.compute.provider_tree [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed in ProviderTree for provider: a513b783-544c-421b-85ec-cfd6d6ee698d {{(pid=62923) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1220.551215] env[62923]: DEBUG nova.scheduler.client.report [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Inventory has not changed for provider a513b783-544c-421b-85ec-cfd6d6ee698d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62923) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1221.056715] env[62923]: DEBUG oslo_concurrency.lockutils [None 
req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1221.079486] env[62923]: INFO nova.scheduler.client.report [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Deleted allocations for instance d4bc3c6c-20ac-4714-8109-867a2f6292b1 [ 1221.587163] env[62923]: DEBUG oslo_concurrency.lockutils [None req-7ffbdd24-b218-4974-b266-1ed806272e5e tempest-AttachVolumeShelveTestJSON-908149708 tempest-AttachVolumeShelveTestJSON-908149708-project-member] Lock "d4bc3c6c-20ac-4714-8109-867a2f6292b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.231s {{(pid=62923) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
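[editor's note] The "Acquiring lock ... / Lock ... acquired ... waited / Lock ... released ... held" triplets throughout this log (for "compute_resources", the instance UUID, and the "-events" lock) are emitted by oslo.concurrency's lockutils wrapper around a critical section. A minimal sketch follows, assuming only oslo.concurrency; the lock name matches the log, but update_usage here is an illustrative stand-in, not Nova's ResourceTracker method.

```python
# Sketch of the lockutils pattern behind the acquire/release records.
# With debug logging enabled, oslo.concurrency emits the same
# "acquired ... waited Ns" and "released ... held Ns" DEBUG lines.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Body executes with the named in-process semaphore held.
    pass

update_usage()
```

By default this is an in-process semaphore; passing external=True to synchronized switches to a file-based lock for coordination across processes, which is why the held/waited times in the log are worth watching when a periodic task and an API request contend for the same name.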